From d651d9ca27a42af2c10d7ef8ec8b4a017e382002 Mon Sep 17 00:00:00 2001 From: speakeasybot Date: Tue, 12 Sep 2023 16:22:44 +0000 Subject: [PATCH] ci: regenerated with OpenAPI Doc 1.0.0, Speakeasy CLI 1.82.5 --- README.md | 82 +---- RELEASES.md | 12 +- USAGE.md | 80 +---- docs/models/shared/ConnectionsResponse.md | 2 - docs/models/shared/DestinationBigquery.md | 5 +- docs/models/shared/DestinationMilvus.md | 13 + .../DestinationMilvusEmbeddingCohere.md | 11 + .../DestinationMilvusEmbeddingCohereMode.md | 8 + .../shared/DestinationMilvusEmbeddingFake.md | 10 + .../DestinationMilvusEmbeddingFakeMode.md | 8 + .../DestinationMilvusEmbeddingFromField.md | 12 + ...DestinationMilvusEmbeddingFromFieldMode.md | 8 + .../DestinationMilvusEmbeddingOpenAI.md | 11 + .../DestinationMilvusEmbeddingOpenAIMode.md | 8 + .../shared/DestinationMilvusIndexing.md | 15 + .../DestinationMilvusIndexingAuthAPIToken.md | 11 + ...stinationMilvusIndexingAuthAPITokenMode.md | 8 + .../DestinationMilvusIndexingAuthNoAuth.md | 10 + ...DestinationMilvusIndexingAuthNoAuthMode.md | 8 + ...tionMilvusIndexingAuthUsernamePassword.md} | 8 +- ...nMilvusIndexingAuthUsernamePasswordMode.md | 8 + docs/models/shared/DestinationMilvusMilvus.md | 8 + .../DestinationMilvusProcessingConfigModel.md | 11 + docs/models/shared/DestinationPinecone.md | 13 + .../DestinationPineconeEmbeddingCohere.md | 11 + .../DestinationPineconeEmbeddingCohereMode.md | 8 + .../DestinationPineconeEmbeddingFake.md | 10 + .../DestinationPineconeEmbeddingFakeMode.md | 8 + .../DestinationPineconeEmbeddingOpenAI.md | 11 + .../DestinationPineconeEmbeddingOpenAIMode.md | 8 + .../shared/DestinationPineconeIndexing.md | 12 + .../shared/DestinationPineconePinecone.md | 8 + ...estinationPineconeProcessingConfigModel.md | 11 + docs/models/shared/DestinationSnowflake.md | 3 +- docs/models/shared/DestinationTypesense.md | 2 +- docs/models/shared/DestinationsResponse.md | 2 - docs/models/shared/Intercom.md | 8 +- docs/models/shared/JobStatusEnum.md | 2 - 
docs/models/shared/JobsResponse.md | 2 - docs/models/shared/LinkedinAdsCredentials.md | 8 +- docs/models/shared/SchemeBasicAuth.md | 9 + docs/models/shared/Security.md | 7 +- docs/models/shared/SourceAmazonAds.md | 3 +- docs/models/shared/SourceApifyDataset.md | 11 +- docs/models/shared/SourceAppfollow.md | 11 +- docs/models/shared/SourceAuth0.md | 3 +- ...SourceAuth0CredentialsOAuth2AccessToken.md | 8 +- docs/models/shared/SourceDatadog.md | 18 - docs/models/shared/SourceDatadogDatadog.md | 8 - docs/models/shared/SourceDatadogQueries.md | 10 - .../shared/SourceDatadogQueriesDataSource.md | 13 - docs/models/shared/SourceDatadogSite.md | 14 - docs/models/shared/SourceGoogleAds.md | 4 +- .../shared/SourceGoogleSearchConsole.md | 19 +- ...ceGoogleSearchConsoleCustomReportConfig.md | 9 + ...archConsoleCustomReportConfigValidEnums.md | 14 + .../SourceGoogleSearchConsoleDataFreshness.md | 11 + .../SourceGoogleSearchConsoleDataState.md | 11 - docs/models/shared/SourceGoogleSheets.md | 1 - docs/models/shared/SourceIntercom.md | 2 + docs/models/shared/SourceLemlist.md | 2 +- docs/models/shared/SourceLinkedinAds.md | 14 +- ...nkedinAdsAdAnalyticsReportConfiguration.md | 10 +- ...lyticsReportConfigurationPivotCategory.md} | 4 +- ...yticsReportConfigurationTimeGranularity.md | 6 +- ...SourceLinkedinAdsCredentialsAccessToken.md | 8 +- .../SourceLinkedinAdsCredentialsOAuth20.md | 12 +- docs/models/shared/SourceMssql.md | 26 +- ...cationMethodLogicalReplicationCDCMethod.md | 8 - ...odReadChangesUsingChangeDataCaptureCDC.md} | 10 +- ...gesUsingChangeDataCaptureCDCDataToSync.md} | 2 +- ...aptureCDCInitialSnapshotIsolationLevel.md} | 2 +- ...dChangesUsingChangeDataCaptureCDCMethod.md | 8 + ...nMethodScanChangesWithUserDefinedCursor.md | 10 + ...ScanChangesWithUserDefinedCursorMethod.md} | 2 +- .../SourceMssqlReplicationMethodStandard.md | 10 - .../shared/SourceOpenweatherLanguage.md | 58 ---- .../shared/SourceOpenweatherOpenweather.md | 8 - 
docs/models/shared/SourceOpenweatherUnits.md | 12 - docs/models/shared/SourcePostgres.md | 2 +- ...MethodDetectChangesWithXminSystemColumn.md | 10 + ...DetectChangesWithXminSystemColumnMethod.md | 8 + ...nMethodScanChangesWithUserDefinedCursor.md | 10 + ...ScanChangesWithUserDefinedCursorMethod.md} | 2 +- ...SourcePostgresReplicationMethodStandard.md | 10 - ...gresReplicationMethodStandardXminMethod.md | 8 - docs/models/shared/SourcePosthog.md | 1 + docs/models/shared/SourcePublicApis.md | 10 - .../shared/SourcePublicApisPublicApis.md | 8 - docs/models/shared/SourceS3.md | 25 +- ...er.md => SourceS3FileBasedStreamConfig.md} | 24 +- ...S3FileBasedStreamConfigFormatAvroFormat.md | 11 + ...sedStreamConfigFormatAvroFormatFiletype.md | 8 + ...eS3FileBasedStreamConfigFormatCSVFormat.md | 23 ++ ...asedStreamConfigFormatCSVFormatFiletype.md | 8 + ...tCSVFormatHeaderDefinitionAutogenerated.md | 10 + ...nitionAutogeneratedHeaderDefinitionType.md | 8 + ...gFormatCSVFormatHeaderDefinitionFromCSV.md | 10 + ...erDefinitionFromCSVHeaderDefinitionType.md | 8 + ...atCSVFormatHeaderDefinitionUserProvided.md | 11 + ...initionUserProvidedHeaderDefinitionType.md | 8 + ...treamConfigFormatCSVFormatInferenceType.md | 11 + ...3FileBasedStreamConfigFormatJsonlFormat.md | 10 + ...edStreamConfigFormatJsonlFormatFiletype.md | 8 + ...ileBasedStreamConfigFormatParquetFormat.md | 11 + ...StreamConfigFormatParquetFormatFiletype.md | 8 + ...S3FileBasedStreamConfigValidationPolicy.md | 12 + .../shared/SourceS3S3AmazonWebServices.md | 4 +- docs/models/shared/SourceShopify.md | 2 +- docs/models/shared/SourceStripe.md | 16 +- docs/models/shared/SourceZendeskSunshine.md | 2 +- ...ourceZendeskSunshineCredentialsAPIToken.md | 10 + ...skSunshineCredentialsAPITokenAuthMethod.md | 8 + ...SourceZendeskSunshineCredentialsOAuth20.md | 11 + ...eskSunshineCredentialsOAuth20AuthMethod.md | 8 + docs/models/shared/SourceZendeskSupport.md | 2 +- docs/models/shared/SourcesResponse.md | 2 - 
docs/models/shared/WorkspacesResponse.md | 2 - docs/sdks/airbyte/README.md | 2 + docs/sdks/connections/README.md | 217 +++--------- docs/sdks/destinations/README.md | 225 ++++++++++--- docs/sdks/jobs/README.md | 58 ++-- docs/sdks/sources/README.md | 314 ++++++++++-------- docs/sdks/streams/README.md | 9 +- docs/sdks/workspaces/README.md | 67 ++-- files.gen | 182 +++++++--- gen.yaml | 8 +- lib/build.gradle | 2 +- .../main/java/com/airbyte/api/Airbyte.java | 3 + .../com/airbyte/api/SDKConfiguration.java | 4 +- .../models/shared/ConnectionsResponse.java | 3 - .../models/shared/DestinationBigquery.java | 14 +- .../api/models/shared/DestinationMilvus.java | 58 ++++ .../DestinationMilvusEmbeddingCohere.java | 36 ++ ...DestinationMilvusEmbeddingCohereMode.java} | 6 +- .../DestinationMilvusEmbeddingFake.java | 26 ++ ...> DestinationMilvusEmbeddingFakeMode.java} | 6 +- .../DestinationMilvusEmbeddingFromField.java | 51 +++ ...stinationMilvusEmbeddingFromFieldMode.java | 18 + .../DestinationMilvusEmbeddingOpenAI.java | 36 ++ .../DestinationMilvusEmbeddingOpenAIMode.java | 18 + .../shared/DestinationMilvusIndexing.java | 90 +++++ ...DestinationMilvusIndexingAuthAPIToken.java | 39 +++ ...inationMilvusIndexingAuthAPITokenMode.java | 18 + .../DestinationMilvusIndexingAuthNoAuth.java | 26 ++ ...stinationMilvusIndexingAuthNoAuthMode.java | 18 + ...ionMilvusIndexingAuthUsernamePassword.java | 51 +++ ...ilvusIndexingAuthUsernamePasswordMode.java | 18 + ...adog.java => DestinationMilvusMilvus.java} | 6 +- ...estinationMilvusProcessingConfigModel.java | 63 ++++ .../models/shared/DestinationPinecone.java | 58 ++++ .../DestinationPineconeEmbeddingCohere.java | 36 ++ ...estinationPineconeEmbeddingCohereMode.java | 18 + .../DestinationPineconeEmbeddingFake.java | 26 ++ .../DestinationPineconeEmbeddingFakeMode.java | 18 + .../DestinationPineconeEmbeddingOpenAI.java | 36 ++ ...estinationPineconeEmbeddingOpenAIMode.java | 18 + .../shared/DestinationPineconeIndexing.java | 49 +++ ....java => 
DestinationPineconePinecone.java} | 6 +- ...tinationPineconeProcessingConfigModel.java | 63 ++++ .../models/shared/DestinationSnowflake.java | 14 +- .../models/shared/DestinationTypesense.java | 4 +- .../models/shared/DestinationsResponse.java | 3 - .../airbyte/api/models/shared/Intercom.java | 6 + .../api/models/shared/JobStatusEnum.java | 3 - .../api/models/shared/JobsResponse.java | 3 - .../models/shared/LinkedinAdsCredentials.java | 4 +- .../api/models/shared/SchemeBasicAuth.java | 32 ++ .../airbyte/api/models/shared/Security.java | 13 +- .../api/models/shared/SourceAmazonAds.java | 14 +- .../api/models/shared/SourceApifyDataset.java | 16 +- .../api/models/shared/SourceAppfollow.java | 44 +-- .../api/models/shared/SourceAuth0.java | 14 + .../api/models/shared/SourceDatadog.java | 123 ------- .../models/shared/SourceDatadogQueries.java | 49 --- .../SourceDatadogQueriesDataSource.java | 24 -- .../api/models/shared/SourceDatadogSite.java | 25 -- .../api/models/shared/SourceGoogleAds.java | 8 +- .../shared/SourceGoogleSearchConsole.java | 30 +- ...GoogleSearchConsoleCustomReportConfig.java | 37 +++ ...chConsoleCustomReportConfigValidEnums.java | 25 ++ ...ourceGoogleSearchConsoleDataFreshness.java | 22 ++ .../SourceGoogleSearchConsoleDataState.java | 22 -- .../api/models/shared/SourceGoogleSheets.java | 12 - .../api/models/shared/SourceIntercom.java | 26 ++ .../api/models/shared/SourceLemlist.java | 2 +- .../api/models/shared/SourceLinkedinAds.java | 4 +- ...edinAdsAdAnalyticsReportConfiguration.java | 16 +- ...ticsReportConfigurationPivotCategory.java} | 6 +- ...icsReportConfigurationTimeGranularity.java | 6 +- ...urceLinkedinAdsCredentialsAccessToken.java | 2 +- .../SourceLinkedinAdsCredentialsOAuth20.java | 6 +- .../api/models/shared/SourceMssql.java | 2 +- ...MethodLogicalReplicationCDCDataToSync.java | 22 -- ...ationCDCInitialSnapshotIsolationLevel.java | 22 -- ...ReadChangesUsingChangeDataCaptureCDC.java} | 20 +- ...esUsingChangeDataCaptureCDCDataToSync.java | 
22 ++ ...ptureCDCInitialSnapshotIsolationLevel.java | 22 ++ ...hangesUsingChangeDataCaptureCDCMethod.java | 18 + ...ethodScanChangesWithUserDefinedCursor.java | 25 ++ ...canChangesWithUserDefinedCursorMethod.java | 18 + .../SourceMssqlReplicationMethodStandard.java | 25 -- .../api/models/shared/SourceOpenweather.java | 87 ----- .../shared/SourceOpenweatherLanguage.java | 69 ---- .../models/shared/SourceOpenweatherUnits.java | 23 -- .../api/models/shared/SourcePostgres.java | 2 +- ...thodDetectChangesWithXminSystemColumn.java | 25 ++ ...tectChangesWithXminSystemColumnMethod.java | 18 + ...ethodScanChangesWithUserDefinedCursor.java | 25 ++ ...canChangesWithUserDefinedCursorMethod.java | 18 + ...urcePostgresReplicationMethodStandard.java | 25 -- ...PostgresReplicationMethodStandardXmin.java | 25 -- .../api/models/shared/SourcePosthog.java | 12 + .../api/models/shared/SourcePublicApis.java | 25 -- .../airbyte/api/models/shared/SourceS3.java | 100 +++++- .../shared/SourceS3FileBasedStreamConfig.java | 135 ++++++++ ...FileBasedStreamConfigFormatAvroFormat.java | 38 +++ ...dStreamConfigFormatAvroFormatFiletype.java | 18 + ...3FileBasedStreamConfigFormatCSVFormat.java | 182 ++++++++++ ...dStreamConfigFormatCSVFormatFiletype.java} | 6 +- ...SVFormatHeaderDefinitionAutogenerated.java | 26 ++ ...tionAutogeneratedHeaderDefinitionType.java | 18 + ...ormatCSVFormatHeaderDefinitionFromCSV.java | 26 ++ ...DefinitionFromCSVHeaderDefinitionType.java | 18 + ...CSVFormatHeaderDefinitionUserProvided.java | 39 +++ ...itionUserProvidedHeaderDefinitionType.java | 18 + ...eamConfigFormatCSVFormatInferenceType.java | 22 ++ ...ileBasedStreamConfigFormatJsonlFormat.java | 26 ++ ...treamConfigFormatJsonlFormatFiletype.java} | 6 +- ...eBasedStreamConfigFormatParquetFormat.java | 38 +++ ...reamConfigFormatParquetFormatFiletype.java | 18 + ...FileBasedStreamConfigValidationPolicy.java | 23 ++ .../shared/SourceS3S3AmazonWebServices.java | 7 +- .../api/models/shared/SourceShopify.java | 4 +- 
.../api/models/shared/SourceStripe.java | 6 +- .../models/shared/SourceZendeskSunshine.java | 13 +- ...rceZendeskSunshineCredentialsAPIToken.java | 46 +++ ...SunshineCredentialsAPITokenAuthMethod.java | 18 + ...urceZendeskSunshineCredentialsOAuth20.java | 58 ++++ ...SunshineCredentialsOAuth20AuthMethod.java} | 6 +- .../models/shared/SourceZendeskSupport.java | 4 +- .../api/models/shared/SourcesResponse.java | 3 - .../api/models/shared/WorkspacesResponse.java | 3 - 243 files changed, 3635 insertions(+), 1707 deletions(-) create mode 100755 docs/models/shared/DestinationMilvus.md create mode 100755 docs/models/shared/DestinationMilvusEmbeddingCohere.md create mode 100755 docs/models/shared/DestinationMilvusEmbeddingCohereMode.md create mode 100755 docs/models/shared/DestinationMilvusEmbeddingFake.md create mode 100755 docs/models/shared/DestinationMilvusEmbeddingFakeMode.md create mode 100755 docs/models/shared/DestinationMilvusEmbeddingFromField.md create mode 100755 docs/models/shared/DestinationMilvusEmbeddingFromFieldMode.md create mode 100755 docs/models/shared/DestinationMilvusEmbeddingOpenAI.md create mode 100755 docs/models/shared/DestinationMilvusEmbeddingOpenAIMode.md create mode 100755 docs/models/shared/DestinationMilvusIndexing.md create mode 100755 docs/models/shared/DestinationMilvusIndexingAuthAPIToken.md create mode 100755 docs/models/shared/DestinationMilvusIndexingAuthAPITokenMode.md create mode 100755 docs/models/shared/DestinationMilvusIndexingAuthNoAuth.md create mode 100755 docs/models/shared/DestinationMilvusIndexingAuthNoAuthMode.md rename docs/models/shared/{SourcePostgresReplicationMethodStandardXmin.md => DestinationMilvusIndexingAuthUsernamePassword.md} (50%) create mode 100755 docs/models/shared/DestinationMilvusIndexingAuthUsernamePasswordMode.md create mode 100755 docs/models/shared/DestinationMilvusMilvus.md create mode 100755 docs/models/shared/DestinationMilvusProcessingConfigModel.md create mode 100755 
docs/models/shared/DestinationPinecone.md create mode 100755 docs/models/shared/DestinationPineconeEmbeddingCohere.md create mode 100755 docs/models/shared/DestinationPineconeEmbeddingCohereMode.md create mode 100755 docs/models/shared/DestinationPineconeEmbeddingFake.md create mode 100755 docs/models/shared/DestinationPineconeEmbeddingFakeMode.md create mode 100755 docs/models/shared/DestinationPineconeEmbeddingOpenAI.md create mode 100755 docs/models/shared/DestinationPineconeEmbeddingOpenAIMode.md create mode 100755 docs/models/shared/DestinationPineconeIndexing.md create mode 100755 docs/models/shared/DestinationPineconePinecone.md create mode 100755 docs/models/shared/DestinationPineconeProcessingConfigModel.md create mode 100755 docs/models/shared/SchemeBasicAuth.md delete mode 100755 docs/models/shared/SourceDatadog.md delete mode 100755 docs/models/shared/SourceDatadogDatadog.md delete mode 100755 docs/models/shared/SourceDatadogQueries.md delete mode 100755 docs/models/shared/SourceDatadogQueriesDataSource.md delete mode 100755 docs/models/shared/SourceDatadogSite.md create mode 100755 docs/models/shared/SourceGoogleSearchConsoleCustomReportConfig.md create mode 100755 docs/models/shared/SourceGoogleSearchConsoleCustomReportConfigValidEnums.md create mode 100755 docs/models/shared/SourceGoogleSearchConsoleDataFreshness.md delete mode 100755 docs/models/shared/SourceGoogleSearchConsoleDataState.md rename docs/models/shared/{SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy.md => SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory.md} (86%) delete mode 100755 docs/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCMethod.md rename docs/models/shared/{SourceMssqlReplicationMethodLogicalReplicationCDC.md => SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC.md} (89%) rename docs/models/shared/{SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync.md => 
SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync.md} (84%) rename docs/models/shared/{SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel.md => SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel.md} (85%) create mode 100755 docs/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod.md create mode 100755 docs/models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursor.md rename docs/models/shared/{SourceMssqlReplicationMethodStandardMethod.md => SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod.md} (58%) delete mode 100755 docs/models/shared/SourceMssqlReplicationMethodStandard.md delete mode 100755 docs/models/shared/SourceOpenweatherLanguage.md delete mode 100755 docs/models/shared/SourceOpenweatherOpenweather.md delete mode 100755 docs/models/shared/SourceOpenweatherUnits.md create mode 100755 docs/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumn.md create mode 100755 docs/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod.md create mode 100755 docs/models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursor.md rename docs/models/shared/{SourcePostgresReplicationMethodStandardMethod.md => SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod.md} (57%) delete mode 100755 docs/models/shared/SourcePostgresReplicationMethodStandard.md delete mode 100755 docs/models/shared/SourcePostgresReplicationMethodStandardXminMethod.md delete mode 100755 docs/models/shared/SourcePublicApis.md delete mode 100755 docs/models/shared/SourcePublicApisPublicApis.md rename docs/models/shared/{SourceOpenweather.md => SourceS3FileBasedStreamConfig.md} (52%) create mode 100755 docs/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormat.md create mode 100755 docs/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormatFiletype.md 
create mode 100755 docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormat.md create mode 100755 docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatFiletype.md create mode 100755 docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogenerated.md create mode 100755 docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType.md create mode 100755 docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSV.md create mode 100755 docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType.md create mode 100755 docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvided.md create mode 100755 docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType.md create mode 100755 docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType.md create mode 100755 docs/models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormat.md create mode 100755 docs/models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype.md create mode 100755 docs/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormat.md create mode 100755 docs/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormatFiletype.md create mode 100755 docs/models/shared/SourceS3FileBasedStreamConfigValidationPolicy.md create mode 100755 docs/models/shared/SourceZendeskSunshineCredentialsAPIToken.md create mode 100755 docs/models/shared/SourceZendeskSunshineCredentialsAPITokenAuthMethod.md create mode 100755 docs/models/shared/SourceZendeskSunshineCredentialsOAuth20.md create mode 100755 docs/models/shared/SourceZendeskSunshineCredentialsOAuth20AuthMethod.md create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvus.java create mode 100755 
lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingCohere.java rename lib/src/main/java/com/airbyte/api/models/shared/{SourceMssqlReplicationMethodStandardMethod.java => DestinationMilvusEmbeddingCohereMode.java} (62%) create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingFake.java rename lib/src/main/java/com/airbyte/api/models/shared/{SourceOpenweatherOpenweather.java => DestinationMilvusEmbeddingFakeMode.java} (66%) create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingFromField.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingFromFieldMode.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingOpenAI.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingOpenAIMode.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexing.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthAPIToken.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthAPITokenMode.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthNoAuth.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthNoAuthMode.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthUsernamePassword.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthUsernamePasswordMode.java rename lib/src/main/java/com/airbyte/api/models/shared/{SourceDatadogDatadog.java => DestinationMilvusMilvus.java} (69%) create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusProcessingConfigModel.java create mode 100755 
lib/src/main/java/com/airbyte/api/models/shared/DestinationPinecone.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingCohere.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingCohereMode.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingFake.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingFakeMode.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingOpenAI.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingOpenAIMode.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeIndexing.java rename lib/src/main/java/com/airbyte/api/models/shared/{SourcePublicApisPublicApis.java => DestinationPineconePinecone.java} (66%) create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeProcessingConfigModel.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SchemeBasicAuth.java delete mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceDatadog.java delete mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceDatadogQueries.java delete mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceDatadogQueriesDataSource.java delete mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceDatadogSite.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleCustomReportConfig.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleCustomReportConfigValidEnums.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleDataFreshness.java delete mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleDataState.java rename 
lib/src/main/java/com/airbyte/api/models/shared/{SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy.java => SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory.java} (79%) delete mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync.java delete mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel.java rename lib/src/main/java/com/airbyte/api/models/shared/{SourceMssqlReplicationMethodLogicalReplicationCDC.java => SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC.java} (54%) create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursor.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod.java delete mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodStandard.java delete mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceOpenweather.java delete mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceOpenweatherLanguage.java delete mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceOpenweatherUnits.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumn.java create mode 100755 
lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursor.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod.java delete mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodStandard.java delete mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodStandardXmin.java delete mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourcePublicApis.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfig.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormat.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormatFiletype.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormat.java rename lib/src/main/java/com/airbyte/api/models/shared/{SourcePostgresReplicationMethodStandardMethod.java => SourceS3FileBasedStreamConfigFormatCSVFormatFiletype.java} (61%) create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogenerated.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSV.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType.java create mode 100755 
lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvided.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormat.java rename lib/src/main/java/com/airbyte/api/models/shared/{SourceMssqlReplicationMethodLogicalReplicationCDCMethod.java => SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype.java} (59%) create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormat.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormatFiletype.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigValidationPolicy.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineCredentialsAPIToken.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineCredentialsAPITokenAuthMethod.java create mode 100755 lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineCredentialsOAuth20.java rename lib/src/main/java/com/airbyte/api/models/shared/{SourcePostgresReplicationMethodStandardXminMethod.java => SourceZendeskSunshineCredentialsOAuth20AuthMethod.java} (63%) diff --git a/README.md b/README.md index 55ad53505..6f5f7fd4b 100755 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ The Developer Portal UI can also be used to help build your integration by showi ### Gradle ```groovy -implementation 'com.airbyte.api:public-api:0.39.1' +implementation 'com.airbyte.api:public-api:0.39.2' ``` @@ -50,71 +50,25 @@ public class Application { 
public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("corrupti") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("corrupti", "provident") {{ + password = ""; + username = ""; + }}; }}) .build(); - com.airbyte.api.models.shared.ConnectionCreateRequest req = new ConnectionCreateRequest("9bd9d8d6-9a67-44e0-b467-cc8796ed151a", "05dfc2dd-f7cc-478c-a1ba-928fc816742c") {{ + com.airbyte.api.models.shared.ConnectionCreateRequest req = new ConnectionCreateRequest("bd9d8d69-a674-4e0f-867c-c8796ed151a0", "5dfc2ddf-7cc7-48ca-9ba9-28fc816742cb") {{ configurations = new StreamConfigurations() {{ streams = new com.airbyte.api.models.shared.StreamConfiguration[]{{ - add(new StreamConfiguration("saepe") {{ + add(new StreamConfiguration("dolor") {{ cursorField = new String[]{{ - add("ipsum"), - add("excepturi"), + add("esse"), }}; - name = "Dorothy Hane"; + name = "Mrs. Miriam Collier"; primaryKey = new String[][]{{ add(new String[]{{ - add("natus"), - }}), - add(new String[]{{ - add("hic"), - add("saepe"), - }}), - add(new String[]{{ - add("in"), - add("corporis"), - add("iste"), - }}), - }}; - syncMode = ConnectionSyncModeEnum.FULL_REFRESH_APPEND; - }}), - add(new StreamConfiguration("excepturi") {{ - cursorField = new String[]{{ - add("architecto"), - add("ipsa"), - add("reiciendis"), - }}; - name = "Shaun Osinski"; - primaryKey = new String[][]{{ - add(new String[]{{ - add("nobis"), - }}), - add(new String[]{{ - add("omnis"), - add("nemo"), - }}), - }}; - syncMode = ConnectionSyncModeEnum.FULL_REFRESH_APPEND; - }}), - add(new StreamConfiguration("quia") {{ - cursorField = new String[]{{ - add("iure"), - }}; - name = "Miss Aubrey Williamson"; - primaryKey = new String[][]{{ - add(new String[]{{ - add("repellat"), - }}), - add(new String[]{{ - add("occaecati"), - add("numquam"), - add("commodi"), - }}), - add(new String[]{{ - add("molestiae"), - add("velit"), + add("sed"), }}), }}; 
syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND; @@ -122,15 +76,15 @@ public class Application { }}; }};; dataResidency = GeographyEnum.US; - name = "Kayla O'Kon"; - namespaceDefinition = NamespaceDefinitionEnum.CUSTOM_FORMAT; + name = "May Turcotte"; + namespaceDefinition = NamespaceDefinitionEnum.DESTINATION; namespaceFormat = "${SOURCE_NAMESPACE}"; - nonBreakingSchemaUpdatesBehavior = NonBreakingSchemaUpdatesBehaviorEnum.IGNORE; - prefix = "tenetur"; - schedule = new ConnectionSchedule(ScheduleTypeEnum.MANUAL) {{ - cronExpression = "id"; + nonBreakingSchemaUpdatesBehavior = NonBreakingSchemaUpdatesBehaviorEnum.PROPAGATE_COLUMNS; + prefix = "iure"; + schedule = new ConnectionSchedule(ScheduleTypeEnum.CRON) {{ + cronExpression = "quidem"; }};; - status = ConnectionStatusEnum.DEPRECATED; + status = ConnectionStatusEnum.ACTIVE; }}; CreateConnectionResponse res = sdk.connections.createConnection(req); diff --git a/RELEASES.md b/RELEASES.md index 94473c816..e2053d58a 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -721,4 +721,14 @@ Based on: ### Generated - [java v0.39.1] . ### Releases -- [Maven Central v0.39.1] https://central.sonatype.com/artifact/com.airbyte/api/0.39.1 - . \ No newline at end of file +- [Maven Central v0.39.1] https://central.sonatype.com/artifact/com.airbyte/api/0.39.1 - . + +## 2023-09-12 16:22:05 +### Changes +Based on: +- OpenAPI Doc 1.0.0 +- Speakeasy CLI 1.82.5 (2.108.3) https://github.com/speakeasy-api/speakeasy +### Generated +- [java v0.39.2] . +### Releases +- [Maven Central v0.39.2] https://central.sonatype.com/artifact/com.airbyte/api/0.39.2 - . 
\ No newline at end of file diff --git a/USAGE.md b/USAGE.md index 5d7531f30..592ec5488 100755 --- a/USAGE.md +++ b/USAGE.md @@ -22,71 +22,25 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("corrupti") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("corrupti", "provident") {{ + password = ""; + username = ""; + }}; }}) .build(); - com.airbyte.api.models.shared.ConnectionCreateRequest req = new ConnectionCreateRequest("9bd9d8d6-9a67-44e0-b467-cc8796ed151a", "05dfc2dd-f7cc-478c-a1ba-928fc816742c") {{ + com.airbyte.api.models.shared.ConnectionCreateRequest req = new ConnectionCreateRequest("bd9d8d69-a674-4e0f-867c-c8796ed151a0", "5dfc2ddf-7cc7-48ca-9ba9-28fc816742cb") {{ configurations = new StreamConfigurations() {{ streams = new com.airbyte.api.models.shared.StreamConfiguration[]{{ - add(new StreamConfiguration("saepe") {{ + add(new StreamConfiguration("dolor") {{ cursorField = new String[]{{ - add("ipsum"), - add("excepturi"), + add("esse"), }}; - name = "Dorothy Hane"; + name = "Mrs. 
Miriam Collier"; primaryKey = new String[][]{{ add(new String[]{{ - add("natus"), - }}), - add(new String[]{{ - add("hic"), - add("saepe"), - }}), - add(new String[]{{ - add("in"), - add("corporis"), - add("iste"), - }}), - }}; - syncMode = ConnectionSyncModeEnum.FULL_REFRESH_APPEND; - }}), - add(new StreamConfiguration("excepturi") {{ - cursorField = new String[]{{ - add("architecto"), - add("ipsa"), - add("reiciendis"), - }}; - name = "Shaun Osinski"; - primaryKey = new String[][]{{ - add(new String[]{{ - add("nobis"), - }}), - add(new String[]{{ - add("omnis"), - add("nemo"), - }}), - }}; - syncMode = ConnectionSyncModeEnum.FULL_REFRESH_APPEND; - }}), - add(new StreamConfiguration("quia") {{ - cursorField = new String[]{{ - add("iure"), - }}; - name = "Miss Aubrey Williamson"; - primaryKey = new String[][]{{ - add(new String[]{{ - add("repellat"), - }}), - add(new String[]{{ - add("occaecati"), - add("numquam"), - add("commodi"), - }}), - add(new String[]{{ - add("molestiae"), - add("velit"), + add("sed"), }}), }}; syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND; @@ -94,15 +48,15 @@ public class Application { }}; }};; dataResidency = GeographyEnum.US; - name = "Kayla O'Kon"; - namespaceDefinition = NamespaceDefinitionEnum.CUSTOM_FORMAT; + name = "May Turcotte"; + namespaceDefinition = NamespaceDefinitionEnum.DESTINATION; namespaceFormat = "${SOURCE_NAMESPACE}"; - nonBreakingSchemaUpdatesBehavior = NonBreakingSchemaUpdatesBehaviorEnum.IGNORE; - prefix = "tenetur"; - schedule = new ConnectionSchedule(ScheduleTypeEnum.MANUAL) {{ - cronExpression = "id"; + nonBreakingSchemaUpdatesBehavior = NonBreakingSchemaUpdatesBehaviorEnum.PROPAGATE_COLUMNS; + prefix = "iure"; + schedule = new ConnectionSchedule(ScheduleTypeEnum.CRON) {{ + cronExpression = "quidem"; }};; - status = ConnectionStatusEnum.DEPRECATED; + status = ConnectionStatusEnum.ACTIVE; }}; CreateConnectionResponse res = sdk.connections.createConnection(req); diff --git 
a/docs/models/shared/ConnectionsResponse.md b/docs/models/shared/ConnectionsResponse.md index 7137a5083..e42dfea31 100755 --- a/docs/models/shared/ConnectionsResponse.md +++ b/docs/models/shared/ConnectionsResponse.md @@ -1,7 +1,5 @@ # ConnectionsResponse -Successful operation - ## Fields diff --git a/docs/models/shared/DestinationBigquery.md b/docs/models/shared/DestinationBigquery.md index d06c06de8..bdf2f9c0f 100755 --- a/docs/models/shared/DestinationBigquery.md +++ b/docs/models/shared/DestinationBigquery.md @@ -14,6 +14,5 @@ The values required to configure the destination. | `destinationType` | [DestinationBigqueryBigquery](../../models/shared/DestinationBigqueryBigquery.md) | :heavy_check_mark: | N/A | | | `loadingMethod` | *Object* | :heavy_minus_sign: | Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. | | | `projectId` | *String* | :heavy_check_mark: | The GCP project ID for the project containing the target BigQuery dataset. Read more here. | | -| `rawDataDataset` | *String* | :heavy_minus_sign: | (Early Access) The dataset to write raw tables into | | -| `transformationPriority` | [DestinationBigqueryTransformationQueryRunType](../../models/shared/DestinationBigqueryTransformationQueryRunType.md) | :heavy_minus_sign: | Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default "interactive" value is used if not set explicitly. | | -| `use1s1tFormat` | *Boolean* | :heavy_minus_sign: | (Early Access) Use Destinations V2. | | \ No newline at end of file +| `rawDataDataset` | *String* | :heavy_minus_sign: | The dataset to write raw tables into | | +| `transformationPriority` | [DestinationBigqueryTransformationQueryRunType](../../models/shared/DestinationBigqueryTransformationQueryRunType.md) | :heavy_minus_sign: | Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. 
The default "interactive" value is used if not set explicitly. | | \ No newline at end of file diff --git a/docs/models/shared/DestinationMilvus.md b/docs/models/shared/DestinationMilvus.md new file mode 100755 index 000000000..4b7689a07 --- /dev/null +++ b/docs/models/shared/DestinationMilvus.md @@ -0,0 +1,13 @@ +# DestinationMilvus + +The values required to configure the destination. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | +| `destinationType` | [DestinationMilvusMilvus](../../models/shared/DestinationMilvusMilvus.md) | :heavy_check_mark: | N/A | +| `embedding` | *Object* | :heavy_check_mark: | Embedding configuration | +| `indexing` | [DestinationMilvusIndexing](../../models/shared/DestinationMilvusIndexing.md) | :heavy_check_mark: | Indexing configuration | +| `processing` | [DestinationMilvusProcessingConfigModel](../../models/shared/DestinationMilvusProcessingConfigModel.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/DestinationMilvusEmbeddingCohere.md b/docs/models/shared/DestinationMilvusEmbeddingCohere.md new file mode 100755 index 000000000..d09f1ebf6 --- /dev/null +++ b/docs/models/shared/DestinationMilvusEmbeddingCohere.md @@ -0,0 +1,11 @@ +# DestinationMilvusEmbeddingCohere + +Use the Cohere API to embed text. 
+ + +## Fields + +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------- | +| `cohereKey` | *String* | :heavy_check_mark: | N/A | +| `mode` | [DestinationMilvusEmbeddingCohereMode](../../models/shared/DestinationMilvusEmbeddingCohereMode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/shared/DestinationMilvusEmbeddingCohereMode.md b/docs/models/shared/DestinationMilvusEmbeddingCohereMode.md new file mode 100755 index 000000000..dddf3b1fa --- /dev/null +++ b/docs/models/shared/DestinationMilvusEmbeddingCohereMode.md @@ -0,0 +1,8 @@ +# DestinationMilvusEmbeddingCohereMode + + +## Values + +| Name | Value | +| -------- | -------- | +| `COHERE` | cohere | \ No newline at end of file diff --git a/docs/models/shared/DestinationMilvusEmbeddingFake.md b/docs/models/shared/DestinationMilvusEmbeddingFake.md new file mode 100755 index 000000000..f0c584222 --- /dev/null +++ b/docs/models/shared/DestinationMilvusEmbeddingFake.md @@ -0,0 +1,10 @@ +# DestinationMilvusEmbeddingFake + +Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. 
+ + +## Fields + +| Field | Type | Required | Description | +| ----------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------- | +| `mode` | [DestinationMilvusEmbeddingFakeMode](../../models/shared/DestinationMilvusEmbeddingFakeMode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/shared/DestinationMilvusEmbeddingFakeMode.md b/docs/models/shared/DestinationMilvusEmbeddingFakeMode.md new file mode 100755 index 000000000..cead33ab2 --- /dev/null +++ b/docs/models/shared/DestinationMilvusEmbeddingFakeMode.md @@ -0,0 +1,8 @@ +# DestinationMilvusEmbeddingFakeMode + + +## Values + +| Name | Value | +| ------ | ------ | +| `FAKE` | fake | \ No newline at end of file diff --git a/docs/models/shared/DestinationMilvusEmbeddingFromField.md b/docs/models/shared/DestinationMilvusEmbeddingFromField.md new file mode 100755 index 000000000..a88e90194 --- /dev/null +++ b/docs/models/shared/DestinationMilvusEmbeddingFromField.md @@ -0,0 +1,12 @@ +# DestinationMilvusEmbeddingFromField + +Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store. 
+ + +## Fields + +| Field | Type | Required | Description | Example | +| --------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- | +| `dimensions` | *Long* | :heavy_check_mark: | The number of dimensions the embedding model is generating | 1536 | +| `fieldName` | *String* | :heavy_check_mark: | Name of the field in the record that contains the embedding | embedding | +| `mode` | [DestinationMilvusEmbeddingFromFieldMode](../../models/shared/DestinationMilvusEmbeddingFromFieldMode.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/shared/DestinationMilvusEmbeddingFromFieldMode.md b/docs/models/shared/DestinationMilvusEmbeddingFromFieldMode.md new file mode 100755 index 000000000..494abf0d3 --- /dev/null +++ b/docs/models/shared/DestinationMilvusEmbeddingFromFieldMode.md @@ -0,0 +1,8 @@ +# DestinationMilvusEmbeddingFromFieldMode + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `FROM_FIELD` | from_field | \ No newline at end of file diff --git a/docs/models/shared/DestinationMilvusEmbeddingOpenAI.md b/docs/models/shared/DestinationMilvusEmbeddingOpenAI.md new file mode 100755 index 000000000..e3d9875ae --- /dev/null +++ b/docs/models/shared/DestinationMilvusEmbeddingOpenAI.md @@ -0,0 +1,11 @@ +# DestinationMilvusEmbeddingOpenAI + +Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. 
+ + +## Fields + +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------- | +| `mode` | [DestinationMilvusEmbeddingOpenAIMode](../../models/shared/DestinationMilvusEmbeddingOpenAIMode.md) | :heavy_minus_sign: | N/A | +| `openaiKey` | *String* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/DestinationMilvusEmbeddingOpenAIMode.md b/docs/models/shared/DestinationMilvusEmbeddingOpenAIMode.md new file mode 100755 index 000000000..0a50451d3 --- /dev/null +++ b/docs/models/shared/DestinationMilvusEmbeddingOpenAIMode.md @@ -0,0 +1,8 @@ +# DestinationMilvusEmbeddingOpenAIMode + + +## Values + +| Name | Value | +| -------- | -------- | +| `OPENAI` | openai | \ No newline at end of file diff --git a/docs/models/shared/DestinationMilvusIndexing.md b/docs/models/shared/DestinationMilvusIndexing.md new file mode 100755 index 000000000..8ea856014 --- /dev/null +++ b/docs/models/shared/DestinationMilvusIndexing.md @@ -0,0 +1,15 @@ +# DestinationMilvusIndexing + +Indexing configuration + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------- | ------------------------------------------------------- | ------------------------------------------------------- | ------------------------------------------------------- | ------------------------------------------------------- | +| `auth` | *Object* | :heavy_check_mark: | Authentication method | | +| `collection` | *String* | :heavy_check_mark: | The collection to load data into | | +| `db` | *String* | :heavy_minus_sign: | The database to 
connect to | | +| `host` | *String* | :heavy_check_mark: | The public endpoint of the Milvus instance. | https://my-instance.zone.zillizcloud.com | +| `textField` | *String* | :heavy_minus_sign: | The field in the entity that contains the embedded text | | +| `vectorField` | *String* | :heavy_minus_sign: | The field in the entity that contains the vector | | \ No newline at end of file diff --git a/docs/models/shared/DestinationMilvusIndexingAuthAPIToken.md b/docs/models/shared/DestinationMilvusIndexingAuthAPIToken.md new file mode 100755 index 000000000..cace581c1 --- /dev/null +++ b/docs/models/shared/DestinationMilvusIndexingAuthAPIToken.md @@ -0,0 +1,11 @@ +# DestinationMilvusIndexingAuthAPIToken + +Authenticate using an API token (suitable for Zilliz Cloud) + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------- | +| `mode` | [DestinationMilvusIndexingAuthAPITokenMode](../../models/shared/DestinationMilvusIndexingAuthAPITokenMode.md) | :heavy_minus_sign: | N/A | +| `token` | *String* | :heavy_check_mark: | API Token for the Milvus instance | \ No newline at end of file diff --git a/docs/models/shared/DestinationMilvusIndexingAuthAPITokenMode.md b/docs/models/shared/DestinationMilvusIndexingAuthAPITokenMode.md new file mode 100755 index 000000000..01a88b86d --- /dev/null +++ b/docs/models/shared/DestinationMilvusIndexingAuthAPITokenMode.md @@ -0,0 +1,8 @@ +# DestinationMilvusIndexingAuthAPITokenMode + + +## Values + +| Name | Value | +| ------- | ------- | +| `TOKEN` | token | \ No newline at end of file diff --git 
a/docs/models/shared/DestinationMilvusIndexingAuthNoAuth.md b/docs/models/shared/DestinationMilvusIndexingAuthNoAuth.md new file mode 100755 index 000000000..cdeafaa0f --- /dev/null +++ b/docs/models/shared/DestinationMilvusIndexingAuthNoAuth.md @@ -0,0 +1,10 @@ +# DestinationMilvusIndexingAuthNoAuth + +Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses) + + +## Fields + +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- | +| `mode` | [DestinationMilvusIndexingAuthNoAuthMode](../../models/shared/DestinationMilvusIndexingAuthNoAuthMode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/shared/DestinationMilvusIndexingAuthNoAuthMode.md b/docs/models/shared/DestinationMilvusIndexingAuthNoAuthMode.md new file mode 100755 index 000000000..b9c3d3c4f --- /dev/null +++ b/docs/models/shared/DestinationMilvusIndexingAuthNoAuthMode.md @@ -0,0 +1,8 @@ +# DestinationMilvusIndexingAuthNoAuthMode + + +## Values + +| Name | Value | +| --------- | --------- | +| `NO_AUTH` | no_auth | \ No newline at end of file diff --git a/docs/models/shared/SourcePostgresReplicationMethodStandardXmin.md b/docs/models/shared/DestinationMilvusIndexingAuthUsernamePassword.md similarity index 50% rename from docs/models/shared/SourcePostgresReplicationMethodStandardXmin.md rename to docs/models/shared/DestinationMilvusIndexingAuthUsernamePassword.md index b7c16a01b..e99b1d689 100755 --- a/docs/models/shared/SourcePostgresReplicationMethodStandardXmin.md +++ 
b/docs/models/shared/DestinationMilvusIndexingAuthUsernamePassword.md @@ -1,10 +1,12 @@ -# SourcePostgresReplicationMethodStandardXmin +# DestinationMilvusIndexingAuthUsernamePassword -Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally. +Authenticate using username and password (suitable for self-managed Milvus clusters) ## Fields | Field | Type | Required | Description | | ----------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------- | -| `method` | [SourcePostgresReplicationMethodStandardXminMethod](../../models/shared/SourcePostgresReplicationMethodStandardXminMethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| `mode` | [DestinationMilvusIndexingAuthUsernamePasswordMode](../../models/shared/DestinationMilvusIndexingAuthUsernamePasswordMode.md) | :heavy_minus_sign: | N/A | +| `password` | *String* | :heavy_check_mark: | Password for the Milvus instance | +| `username` | *String* | :heavy_check_mark: | Username for the Milvus instance | \ No newline at end of file diff --git a/docs/models/shared/DestinationMilvusIndexingAuthUsernamePasswordMode.md b/docs/models/shared/DestinationMilvusIndexingAuthUsernamePasswordMode.md new file mode 100755 index 000000000..ede6f8ba4 --- /dev/null +++ b/docs/models/shared/DestinationMilvusIndexingAuthUsernamePasswordMode.md @@ -0,0 +1,8 @@ +# DestinationMilvusIndexingAuthUsernamePasswordMode + + +## Values + +| Name | Value | +| ------------------- | ------------------- | +| `USERNAME_PASSWORD` | username_password | 
\ No newline at end of file diff --git a/docs/models/shared/DestinationMilvusMilvus.md b/docs/models/shared/DestinationMilvusMilvus.md new file mode 100755 index 000000000..012584cc3 --- /dev/null +++ b/docs/models/shared/DestinationMilvusMilvus.md @@ -0,0 +1,8 @@ +# DestinationMilvusMilvus + + +## Values + +| Name | Value | +| -------- | -------- | +| `MILVUS` | milvus | \ No newline at end of file diff --git a/docs/models/shared/DestinationMilvusProcessingConfigModel.md b/docs/models/shared/DestinationMilvusProcessingConfigModel.md new file mode 100755 index 000000000..d4f801ee8 --- /dev/null +++ b/docs/models/shared/DestinationMilvusProcessingConfigModel.md @@ -0,0 +1,11 @@ +# DestinationMilvusProcessingConfigModel + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `chunkOverlap` | 
*Long* | :heavy_minus_sign: | Size of overlap between chunks in tokens to store in vector store to better capture relevant context | +| `chunkSize` | *Long* | :heavy_check_mark: | Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM) | +| `metadataFields` | List<*String*> | :heavy_minus_sign: | List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path. | +| `textFields` | List<*String*> | :heavy_minus_sign: | List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. | \ No newline at end of file diff --git a/docs/models/shared/DestinationPinecone.md b/docs/models/shared/DestinationPinecone.md new file mode 100755 index 000000000..d4a3140ca --- /dev/null +++ b/docs/models/shared/DestinationPinecone.md @@ -0,0 +1,13 @@ +# DestinationPinecone + +The values required to configure the destination. 
+ + +## Fields + +| Field | Type | Required | Description | +| ----------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------- | +| `destinationType` | [DestinationPineconePinecone](../../models/shared/DestinationPineconePinecone.md) | :heavy_check_mark: | N/A | +| `embedding` | *Object* | :heavy_check_mark: | Embedding configuration | +| `indexing` | [DestinationPineconeIndexing](../../models/shared/DestinationPineconeIndexing.md) | :heavy_check_mark: | Pinecone is a popular vector store that can be used to store and retrieve embeddings. | +| `processing` | [DestinationPineconeProcessingConfigModel](../../models/shared/DestinationPineconeProcessingConfigModel.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/DestinationPineconeEmbeddingCohere.md b/docs/models/shared/DestinationPineconeEmbeddingCohere.md new file mode 100755 index 000000000..58c03d6e6 --- /dev/null +++ b/docs/models/shared/DestinationPineconeEmbeddingCohere.md @@ -0,0 +1,11 @@ +# DestinationPineconeEmbeddingCohere + +Use the Cohere API to embed text. 
+ + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | +| `cohereKey` | *String* | :heavy_check_mark: | N/A | +| `mode` | [DestinationPineconeEmbeddingCohereMode](../../models/shared/DestinationPineconeEmbeddingCohereMode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/shared/DestinationPineconeEmbeddingCohereMode.md b/docs/models/shared/DestinationPineconeEmbeddingCohereMode.md new file mode 100755 index 000000000..9bc406815 --- /dev/null +++ b/docs/models/shared/DestinationPineconeEmbeddingCohereMode.md @@ -0,0 +1,8 @@ +# DestinationPineconeEmbeddingCohereMode + + +## Values + +| Name | Value | +| -------- | -------- | +| `COHERE` | cohere | \ No newline at end of file diff --git a/docs/models/shared/DestinationPineconeEmbeddingFake.md b/docs/models/shared/DestinationPineconeEmbeddingFake.md new file mode 100755 index 000000000..8ca60feae --- /dev/null +++ b/docs/models/shared/DestinationPineconeEmbeddingFake.md @@ -0,0 +1,10 @@ +# DestinationPineconeEmbeddingFake + +Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. 
+ + +## Fields + +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------- | +| `mode` | [DestinationPineconeEmbeddingFakeMode](../../models/shared/DestinationPineconeEmbeddingFakeMode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/shared/DestinationPineconeEmbeddingFakeMode.md b/docs/models/shared/DestinationPineconeEmbeddingFakeMode.md new file mode 100755 index 000000000..632763918 --- /dev/null +++ b/docs/models/shared/DestinationPineconeEmbeddingFakeMode.md @@ -0,0 +1,8 @@ +# DestinationPineconeEmbeddingFakeMode + + +## Values + +| Name | Value | +| ------ | ------ | +| `FAKE` | fake | \ No newline at end of file diff --git a/docs/models/shared/DestinationPineconeEmbeddingOpenAI.md b/docs/models/shared/DestinationPineconeEmbeddingOpenAI.md new file mode 100755 index 000000000..b63fb3849 --- /dev/null +++ b/docs/models/shared/DestinationPineconeEmbeddingOpenAI.md @@ -0,0 +1,11 @@ +# DestinationPineconeEmbeddingOpenAI + +Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. 
+ + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | +| `mode` | [DestinationPineconeEmbeddingOpenAIMode](../../models/shared/DestinationPineconeEmbeddingOpenAIMode.md) | :heavy_minus_sign: | N/A | +| `openaiKey` | *String* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/DestinationPineconeEmbeddingOpenAIMode.md b/docs/models/shared/DestinationPineconeEmbeddingOpenAIMode.md new file mode 100755 index 000000000..fd3b12b13 --- /dev/null +++ b/docs/models/shared/DestinationPineconeEmbeddingOpenAIMode.md @@ -0,0 +1,8 @@ +# DestinationPineconeEmbeddingOpenAIMode + + +## Values + +| Name | Value | +| -------- | -------- | +| `OPENAI` | openai | \ No newline at end of file diff --git a/docs/models/shared/DestinationPineconeIndexing.md b/docs/models/shared/DestinationPineconeIndexing.md new file mode 100755 index 000000000..29d17709e --- /dev/null +++ b/docs/models/shared/DestinationPineconeIndexing.md @@ -0,0 +1,12 @@ +# DestinationPineconeIndexing + +Pinecone is a popular vector store that can be used to store and retrieve embeddings. 
+ + +## Fields + +| Field | Type | Required | Description | +| --------------------------- | --------------------------- | --------------------------- | --------------------------- | +| `index` | *String* | :heavy_check_mark: | Pinecone index to use | +| `pineconeEnvironment` | *String* | :heavy_check_mark: | Pinecone environment to use | +| `pineconeKey` | *String* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/DestinationPineconePinecone.md b/docs/models/shared/DestinationPineconePinecone.md new file mode 100755 index 000000000..7120b0964 --- /dev/null +++ b/docs/models/shared/DestinationPineconePinecone.md @@ -0,0 +1,8 @@ +# DestinationPineconePinecone + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `PINECONE` | pinecone | \ No newline at end of file diff --git a/docs/models/shared/DestinationPineconeProcessingConfigModel.md b/docs/models/shared/DestinationPineconeProcessingConfigModel.md new file mode 100755 index 000000000..08f9e4866 --- /dev/null +++ b/docs/models/shared/DestinationPineconeProcessingConfigModel.md @@ -0,0 +1,11 @@ +# DestinationPineconeProcessingConfigModel + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `chunkOverlap` | 
*Long* | :heavy_minus_sign: | Size of overlap between chunks in tokens to store in vector store to better capture relevant context | +| `chunkSize` | *Long* | :heavy_check_mark: | Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM) | +| `metadataFields` | List<*String*> | :heavy_minus_sign: | List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path. | +| `textFields` | List<*String*> | :heavy_minus_sign: | List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. | \ No newline at end of file diff --git a/docs/models/shared/DestinationSnowflake.md b/docs/models/shared/DestinationSnowflake.md index c070c5e20..196f16b27 100755 --- a/docs/models/shared/DestinationSnowflake.md +++ b/docs/models/shared/DestinationSnowflake.md @@ -12,9 +12,8 @@ The values required to configure the destination. 
| `destinationType` | [DestinationSnowflakeSnowflake](../../models/shared/DestinationSnowflakeSnowflake.md) | :heavy_check_mark: | N/A | | | `host` | *String* | :heavy_check_mark: | Enter your Snowflake account's locator (in the format ...snowflakecomputing.com) | accountname.us-east-2.aws.snowflakecomputing.com | | `jdbcUrlParams` | *String* | :heavy_minus_sign: | Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3 | | -| `rawDataSchema` | *String* | :heavy_minus_sign: | (Beta) The schema to write raw tables into | | +| `rawDataSchema` | *String* | :heavy_minus_sign: | The schema to write raw tables into | | | `role` | *String* | :heavy_check_mark: | Enter the role that you want to use to access Snowflake | AIRBYTE_ROLE | | `schema` | *String* | :heavy_check_mark: | Enter the name of the default schema | AIRBYTE_SCHEMA | -| `use1s1tFormat` | *Boolean* | :heavy_minus_sign: | (Beta) Use Destinations V2. Contact Airbyte Support to participate in the beta program. | | | `username` | *String* | :heavy_check_mark: | Enter the name of the user you want to use to access the database | AIRBYTE_USER | | `warehouse` | *String* | :heavy_check_mark: | Enter the name of the warehouse that you want to sync data into | AIRBYTE_WAREHOUSE | \ No newline at end of file diff --git a/docs/models/shared/DestinationTypesense.md b/docs/models/shared/DestinationTypesense.md index 325a4b531..c8eb43ffe 100755 --- a/docs/models/shared/DestinationTypesense.md +++ b/docs/models/shared/DestinationTypesense.md @@ -8,7 +8,7 @@ The values required to configure the destination. 
| Field | Type | Required | Description | | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | | `apiKey` | *String* | :heavy_check_mark: | Typesense API Key | -| `batchSize` | *String* | :heavy_minus_sign: | How many documents should be imported together. Default 1000 | +| `batchSize` | *Long* | :heavy_minus_sign: | How many documents should be imported together. Default 1000 | | `destinationType` | [DestinationTypesenseTypesense](../../models/shared/DestinationTypesenseTypesense.md) | :heavy_check_mark: | N/A | | `host` | *String* | :heavy_check_mark: | Hostname of the Typesense instance without protocol. | | `port` | *String* | :heavy_minus_sign: | Port of the Typesense instance. Ex: 8108, 80, 443. Default is 443 | diff --git a/docs/models/shared/DestinationsResponse.md b/docs/models/shared/DestinationsResponse.md index 11ef33d50..8c5c6957a 100755 --- a/docs/models/shared/DestinationsResponse.md +++ b/docs/models/shared/DestinationsResponse.md @@ -1,7 +1,5 @@ # DestinationsResponse -Successful operation - ## Fields diff --git a/docs/models/shared/Intercom.md b/docs/models/shared/Intercom.md index 438e02216..f150da87a 100755 --- a/docs/models/shared/Intercom.md +++ b/docs/models/shared/Intercom.md @@ -5,7 +5,7 @@ The values required to configure the source. 
## Fields -| Field | Type | Required | Description | -| ------------------ | ------------------ | ------------------ | ------------------ | -| `clientId` | *String* | :heavy_minus_sign: | N/A | -| `clientSecret` | *String* | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------- | -------------------------------------------- | -------------------------------------------- | -------------------------------------------- | +| `clientId` | *String* | :heavy_minus_sign: | Client Id for your Intercom application. | +| `clientSecret` | *String* | :heavy_minus_sign: | Client Secret for your Intercom application. | \ No newline at end of file diff --git a/docs/models/shared/JobStatusEnum.md b/docs/models/shared/JobStatusEnum.md index b6786912b..2562629d6 100755 --- a/docs/models/shared/JobStatusEnum.md +++ b/docs/models/shared/JobStatusEnum.md @@ -1,7 +1,5 @@ # JobStatusEnum -The Job status you want to filter by - ## Values diff --git a/docs/models/shared/JobsResponse.md b/docs/models/shared/JobsResponse.md index a7f07bb6c..e38f98da5 100755 --- a/docs/models/shared/JobsResponse.md +++ b/docs/models/shared/JobsResponse.md @@ -1,7 +1,5 @@ # JobsResponse -List all the Jobs by connectionId. - ## Fields diff --git a/docs/models/shared/LinkedinAdsCredentials.md b/docs/models/shared/LinkedinAdsCredentials.md index 350ccc58f..94fafc8e2 100755 --- a/docs/models/shared/LinkedinAdsCredentials.md +++ b/docs/models/shared/LinkedinAdsCredentials.md @@ -3,7 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| --------------------------------------------------------- | --------------------------------------------------------- | --------------------------------------------------------- | --------------------------------------------------------- | -| `clientId` | *String* | :heavy_minus_sign: | The client ID of the LinkedIn Ads developer application. 
| -| `clientSecret` | *String* | :heavy_minus_sign: | The client secret the LinkedIn Ads developer application. | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `clientId` | *String* | :heavy_minus_sign: | The client ID of your developer application. Refer to our documentation for more information. | +| `clientSecret` | *String* | :heavy_minus_sign: | The client secret of your developer application. Refer to our documentation for more information. 
| \ No newline at end of file diff --git a/docs/models/shared/SchemeBasicAuth.md b/docs/models/shared/SchemeBasicAuth.md new file mode 100755 index 000000000..e9deba912 --- /dev/null +++ b/docs/models/shared/SchemeBasicAuth.md @@ -0,0 +1,9 @@ +# SchemeBasicAuth + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------ | ------------------ | ------------------ | ------------------ | ------------------ | +| `password` | *String* | :heavy_check_mark: | N/A | | +| `username` | *String* | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/shared/Security.md b/docs/models/shared/Security.md index 730dca462..d3a21e63a 100755 --- a/docs/models/shared/Security.md +++ b/docs/models/shared/Security.md @@ -3,6 +3,7 @@ ## Fields -| Field | Type | Required | Description | Example | -| ------------------ | ------------------ | ------------------ | ------------------ | ------------------ | -| `bearerAuth` | *String* | :heavy_check_mark: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| --------------------------------------------------------- | --------------------------------------------------------- | --------------------------------------------------------- | --------------------------------------------------------- | --------------------------------------------------------- | +| `basicAuth` | [SchemeBasicAuth](../../models/shared/SchemeBasicAuth.md) | :heavy_minus_sign: | N/A | | +| `bearerAuth` | *String* | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/shared/SourceAmazonAds.md b/docs/models/shared/SourceAmazonAds.md index ca2e336fc..85faac0c8 100755 --- a/docs/models/shared/SourceAmazonAds.md +++ b/docs/models/shared/SourceAmazonAds.md @@ -11,7 +11,8 @@ The values required to configure the source. | `clientId` | *String* | :heavy_check_mark: | The client ID of your Amazon Ads developer application. See the docs for more information. 
| | | `clientSecret` | *String* | :heavy_check_mark: | The client secret of your Amazon Ads developer application. See the docs for more information. | | | `lookBackWindow` | *Long* | :heavy_minus_sign: | The amount of days to go back in time to get the updated data from Amazon Ads | 3 | -| `profiles` | List<*Long*> | :heavy_minus_sign: | Profile IDs you want to fetch data for. See docs for more details. | | +| `marketplaceIds` | List<*String*> | :heavy_minus_sign: | Marketplace IDs you want to fetch data for. Note: If Profile IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID. | | +| `profiles` | List<*Long*> | :heavy_minus_sign: | Profile IDs you want to fetch data for. See docs for more details. Note: If Marketplace IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID. | | | `refreshToken` | *String* | :heavy_check_mark: | Amazon Ads refresh token. See the docs for more information on how to obtain this token. | | | `region` | [SourceAmazonAdsRegion](../../models/shared/SourceAmazonAdsRegion.md) | :heavy_minus_sign: | Region to pull data from (EU/NA/FE). See docs for more details. | | | `reportRecordTypes` | List<[SourceAmazonAdsReportRecordTypes](../../models/shared/SourceAmazonAdsReportRecordTypes.md)> | :heavy_minus_sign: | Optional configuration which accepts an array of string of record types. Leave blank for default behaviour to pull all report types. Use this config option only if you want to pull specific report type(s). See docs for more details | | diff --git a/docs/models/shared/SourceApifyDataset.md b/docs/models/shared/SourceApifyDataset.md index f013dd19c..52d2f3980 100755 --- a/docs/models/shared/SourceApifyDataset.md +++ b/docs/models/shared/SourceApifyDataset.md @@ -5,8 +5,9 @@ The values required to configure the source. 
## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `clean` | *Boolean* | :heavy_minus_sign: | If set to true, only clean items will be downloaded from the dataset. See description of what clean means in Apify API docs. If not sure, set clean to false. | -| `datasetId` | *String* | :heavy_check_mark: | ID of the dataset you would like to load to Airbyte. 
| -| `sourceType` | [SourceApifyDatasetApifyDataset](../../models/shared/SourceApifyDatasetApifyDataset.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `clean` | *Boolean* | :heavy_minus_sign: | If set to true, only clean items will be downloaded from the dataset. See description of what clean means in Apify API docs. If not sure, set clean to false. | | +| `datasetId` | *String* | :heavy_minus_sign: | ID of the dataset you would like to load to Airbyte. 
| | +| `sourceType` | [SourceApifyDatasetApifyDataset](../../models/shared/SourceApifyDatasetApifyDataset.md) | :heavy_check_mark: | N/A | | +| `token` | *String* | :heavy_check_mark: | Your application's Client Secret. You can find this value on the console integrations tab after you login. | Personal API tokens | \ No newline at end of file diff --git a/docs/models/shared/SourceAppfollow.md b/docs/models/shared/SourceAppfollow.md index 00be8804a..6e7d30f0b 100755 --- a/docs/models/shared/SourceAppfollow.md +++ b/docs/models/shared/SourceAppfollow.md @@ -5,10 +5,7 @@ The values required to configure the source. ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------- | -| `apiSecret` | *String* | :heavy_check_mark: | api secret provided by Appfollow | -| `cid` | *String* | :heavy_check_mark: | client id provided by Appfollow | -| `country` | *String* | :heavy_check_mark: | getting data by Country | -| `extId` | *String* | :heavy_check_mark: | for App Store — this is 9-10 digits identification number; for Google Play — this is bundle name; | -| `sourceType` | [SourceAppfollowAppfollow](../../models/shared/SourceAppfollowAppfollow.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| --------------------------------------------------------------------------- | --------------------------------------------------------------------------- | --------------------------------------------------------------------------- | --------------------------------------------------------------------------- | +| `apiSecret` | *String* | 
:heavy_minus_sign: | API Key provided by Appfollow | +| `sourceType` | [SourceAppfollowAppfollow](../../models/shared/SourceAppfollowAppfollow.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/SourceAuth0.md b/docs/models/shared/SourceAuth0.md index 41704765a..59f4c0eb1 100755 --- a/docs/models/shared/SourceAuth0.md +++ b/docs/models/shared/SourceAuth0.md @@ -9,4 +9,5 @@ The values required to configure the source. | ----------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | | `baseUrl` | *String* | :heavy_check_mark: | The Authentication API is served over HTTPS. All URLs referenced in the documentation have the following base `https://YOUR_DOMAIN` | https://dev-yourOrg.us.auth0.com/ | | `credentials` | *Object* | :heavy_check_mark: | N/A | | -| `sourceType` | [SourceAuth0Auth0](../../models/shared/SourceAuth0Auth0.md) | :heavy_check_mark: | N/A | | \ No newline at end of file +| `sourceType` | [SourceAuth0Auth0](../../models/shared/SourceAuth0Auth0.md) | :heavy_check_mark: | N/A | | +| `startDate` | *String* | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. 
| 2023-08-05T00:43:59.244Z | \ No newline at end of file diff --git a/docs/models/shared/SourceAuth0CredentialsOAuth2AccessToken.md b/docs/models/shared/SourceAuth0CredentialsOAuth2AccessToken.md index 2b9f5c9ef..3b458ae78 100755 --- a/docs/models/shared/SourceAuth0CredentialsOAuth2AccessToken.md +++ b/docs/models/shared/SourceAuth0CredentialsOAuth2AccessToken.md @@ -3,7 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `accessToken` | *String* | :heavy_check_mark: | Also called API Access Token The access token used to call the Auth0 Management API Token. It's a JWT that contains specific grant permissions knowns as scopes. 
| -| `authType` | [SourceAuth0CredentialsOAuth2AccessTokenAuthenticationMethod](../../models/shared/SourceAuth0CredentialsOAuth2AccessTokenAuthenticationMethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `accessToken` | *String* | :heavy_check_mark: | Also called API Access Token The access token used to call the Auth0 Management API Token. It's a JWT that contains specific grant permissions knowns as scopes. 
| | +| `authType` | [SourceAuth0CredentialsOAuth2AccessTokenAuthenticationMethod](../../models/shared/SourceAuth0CredentialsOAuth2AccessTokenAuthenticationMethod.md) | :heavy_check_mark: | N/A | oauth2_access_token | \ No newline at end of file diff --git a/docs/models/shared/SourceDatadog.md b/docs/models/shared/SourceDatadog.md deleted file mode 100755 index d02bb6b09..000000000 --- a/docs/models/shared/SourceDatadog.md +++ /dev/null @@ -1,18 +0,0 @@ -# SourceDatadog - -The values required to configure the source. - - -## Fields - -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `apiKey` | *String* | :heavy_check_mark: | Datadog API key | | -| `applicationKey` | *String* | :heavy_check_mark: | Datadog application key | | -| `endDate` | *String* | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Data after this date will not be replicated. 
An empty value will represent the current datetime for each execution. This just applies to Incremental syncs. | 2022-10-01T00:00:00Z | -| `maxRecordsPerRequest` | *Long* | :heavy_minus_sign: | Maximum number of records to collect per request. | | -| `queries` | List<[SourceDatadogQueries](../../models/shared/SourceDatadogQueries.md)> | :heavy_minus_sign: | List of queries to be run and used as inputs. | | -| `query` | *String* | :heavy_minus_sign: | The search query. This just applies to Incremental syncs. If empty, it'll collect all logs. | | -| `site` | [SourceDatadogSite](../../models/shared/SourceDatadogSite.md) | :heavy_minus_sign: | The site where Datadog data resides in. | | -| `sourceType` | [SourceDatadogDatadog](../../models/shared/SourceDatadogDatadog.md) | :heavy_check_mark: | N/A | | -| `startDate` | *String* | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. This just applies to Incremental syncs. 
| 2022-10-01T00:00:00Z | \ No newline at end of file diff --git a/docs/models/shared/SourceDatadogDatadog.md b/docs/models/shared/SourceDatadogDatadog.md deleted file mode 100755 index 2c543f61f..000000000 --- a/docs/models/shared/SourceDatadogDatadog.md +++ /dev/null @@ -1,8 +0,0 @@ -# SourceDatadogDatadog - - -## Values - -| Name | Value | -| --------- | --------- | -| `DATADOG` | datadog | \ No newline at end of file diff --git a/docs/models/shared/SourceDatadogQueries.md b/docs/models/shared/SourceDatadogQueries.md deleted file mode 100755 index d6fb1ea60..000000000 --- a/docs/models/shared/SourceDatadogQueries.md +++ /dev/null @@ -1,10 +0,0 @@ -# SourceDatadogQueries - - -## Fields - -| Field | Type | Required | Description | -| --------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------- | -| `dataSource` | [SourceDatadogQueriesDataSource](../../models/shared/SourceDatadogQueriesDataSource.md) | :heavy_check_mark: | A data source that is powered by the platform. | -| `name` | *String* | :heavy_check_mark: | The variable name for use in queries. | -| `query` | *String* | :heavy_check_mark: | A classic query string. | \ No newline at end of file diff --git a/docs/models/shared/SourceDatadogQueriesDataSource.md b/docs/models/shared/SourceDatadogQueriesDataSource.md deleted file mode 100755 index 54a779539..000000000 --- a/docs/models/shared/SourceDatadogQueriesDataSource.md +++ /dev/null @@ -1,13 +0,0 @@ -# SourceDatadogQueriesDataSource - -A data source that is powered by the platform. 
- - -## Values - -| Name | Value | -| ------------ | ------------ | -| `METRICS` | metrics | -| `CLOUD_COST` | cloud_cost | -| `LOGS` | logs | -| `RUM` | rum | \ No newline at end of file diff --git a/docs/models/shared/SourceDatadogSite.md b/docs/models/shared/SourceDatadogSite.md deleted file mode 100755 index d7a846651..000000000 --- a/docs/models/shared/SourceDatadogSite.md +++ /dev/null @@ -1,14 +0,0 @@ -# SourceDatadogSite - -The site where Datadog data resides in. - - -## Values - -| Name | Value | -| ------------------- | ------------------- | -| `DATADOGHQ_COM` | datadoghq.com | -| `US3_DATADOGHQ_COM` | us3.datadoghq.com | -| `US5_DATADOGHQ_COM` | us5.datadoghq.com | -| `DATADOGHQ_EU` | datadoghq.eu | -| `DDOG_GOV_COM` | ddog-gov.com | \ No newline at end of file diff --git a/docs/models/shared/SourceGoogleAds.md b/docs/models/shared/SourceGoogleAds.md index 810c132f3..ccefb9162 100755 --- a/docs/models/shared/SourceGoogleAds.md +++ b/docs/models/shared/SourceGoogleAds.md @@ -11,7 +11,7 @@ The values required to configure the source. | `credentials` | [SourceGoogleAdsGoogleCredentials](../../models/shared/SourceGoogleAdsGoogleCredentials.md) | :heavy_check_mark: | N/A | | | `customQueries` | List<[SourceGoogleAdsCustomQueries](../../models/shared/SourceGoogleAdsCustomQueries.md)> | :heavy_minus_sign: | N/A | | | `customerId` | *String* | :heavy_check_mark: | Comma-separated list of (client) customer IDs. Each customer ID must be specified as a 10-digit number without dashes. For detailed instructions on finding this value, refer to our documentation. | 6783948572,5839201945 | -| `endDate` | [LocalDate](https://docs.oracle.com/javase/8/docs/api/java/time/LocalDate.html) | :heavy_minus_sign: | UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. | 2017-01-30 | +| `endDate` | [LocalDate](https://docs.oracle.com/javase/8/docs/api/java/time/LocalDate.html) | :heavy_minus_sign: | UTC date in the format YYYY-MM-DD. 
Any data after this date will not be replicated. (Default value of today is used if not set) | 2017-01-30 | | `loginCustomerId` | *String* | :heavy_minus_sign: | If your access to the customer account is through a manager account, this field is required, and must be set to the 10-digit customer ID of the manager account. For more information about this field, refer to Google's documentation. | 7349206847 | | `sourceType` | [SourceGoogleAdsGoogleAds](../../models/shared/SourceGoogleAdsGoogleAds.md) | :heavy_check_mark: | N/A | | -| `startDate` | [LocalDate](https://docs.oracle.com/javase/8/docs/api/java/time/LocalDate.html) | :heavy_check_mark: | UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. | 2017-01-25 | \ No newline at end of file +| `startDate` | [LocalDate](https://docs.oracle.com/javase/8/docs/api/java/time/LocalDate.html) | :heavy_minus_sign: | UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. (Default value of two years ago is used if not set) | 2017-01-25 | \ No newline at end of file diff --git a/docs/models/shared/SourceGoogleSearchConsole.md b/docs/models/shared/SourceGoogleSearchConsole.md index 0de88bc0d..cbc9b3028 100755 --- a/docs/models/shared/SourceGoogleSearchConsole.md +++ b/docs/models/shared/SourceGoogleSearchConsole.md @@ -5,12 +5,13 @@ The values required to configure the source. 
## Fields -| Field | Type | Required | Description | Example | -| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `authorization` | *Object* | :heavy_check_mark: | N/A | | -| `customReports` | *String* | :heavy_minus_sign: | A JSON array describing the custom reports you want to sync from Google Search Console. See the docs for more information about the exact format you can use to fill out this field. 
| | -| `dataState` | [SourceGoogleSearchConsoleDataState](../../models/shared/SourceGoogleSearchConsoleDataState.md) | :heavy_minus_sign: | If "final" or if this parameter is omitted, the returned data will include only finalized data. Setting this parameter to "all" should not be used with Incremental Sync mode as it may cause data loss. If "all", data will include fresh data. | final | -| `endDate` | [LocalDate](https://docs.oracle.com/javase/8/docs/api/java/time/LocalDate.html) | :heavy_minus_sign: | UTC date in the format 2017-01-25. Any data after this date will not be replicated. Must be greater or equal to the start date field. | 2021-12-12 | -| `siteUrls` | List<*String*> | :heavy_check_mark: | The URLs of the website property attached to your GSC account. Read more here. | | -| `sourceType` | [SourceGoogleSearchConsoleGoogleSearchConsole](../../models/shared/SourceGoogleSearchConsoleGoogleSearchConsole.md) | :heavy_check_mark: | N/A | | -| `startDate` | [LocalDate](https://docs.oracle.com/javase/8/docs/api/java/time/LocalDate.html) | :heavy_check_mark: | UTC date in the format 2017-01-25. Any data before this date will not be replicated. 
| 2021-01-01 | \ No newline at end of file +| Field | Type | Required | Description | Example | +| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| 
`authorization` | *Object* | :heavy_check_mark: | N/A | | +| `customReports` | *String* | :heavy_minus_sign: | (DEPRECATED) A JSON array describing the custom reports you want to sync from Google Search Console. See our documentation for more information on formulating custom reports. | | +| `customReportsArray` | List<[SourceGoogleSearchConsoleCustomReportConfig](../../models/shared/SourceGoogleSearchConsoleCustomReportConfig.md)> | :heavy_minus_sign: | You can add your Custom Analytics report by creating one. | | +| `dataState` | [SourceGoogleSearchConsoleDataFreshness](../../models/shared/SourceGoogleSearchConsoleDataFreshness.md) | :heavy_minus_sign: | If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation. | final | +| `endDate` | [LocalDate](https://docs.oracle.com/javase/8/docs/api/java/time/LocalDate.html) | :heavy_minus_sign: | UTC date in the format YYYY-MM-DD. Any data created after this date will not be replicated. Must be greater or equal to the start date field. Leaving this field blank will replicate all data from the start date onward. | 2021-12-12 | +| `siteUrls` | List<*String*> | :heavy_check_mark: | The URLs of the website property attached to your GSC account. Learn more about properties here. | | +| `sourceType` | [SourceGoogleSearchConsoleGoogleSearchConsole](../../models/shared/SourceGoogleSearchConsoleGoogleSearchConsole.md) | :heavy_check_mark: | N/A | | +| `startDate` | [LocalDate](https://docs.oracle.com/javase/8/docs/api/java/time/LocalDate.html) | :heavy_minus_sign: | UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. 
| | \ No newline at end of file diff --git a/docs/models/shared/SourceGoogleSearchConsoleCustomReportConfig.md b/docs/models/shared/SourceGoogleSearchConsoleCustomReportConfig.md new file mode 100755 index 000000000..7fc00657e --- /dev/null +++ b/docs/models/shared/SourceGoogleSearchConsoleCustomReportConfig.md @@ -0,0 +1,9 @@ +# SourceGoogleSearchConsoleCustomReportConfig + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------- | +| `dimensions` | List<[SourceGoogleSearchConsoleCustomReportConfigValidEnums](../../models/shared/SourceGoogleSearchConsoleCustomReportConfigValidEnums.md)> | :heavy_check_mark: | A list of dimensions (country, date, device, page, query) | +| `name` | *String* | :heavy_check_mark: | The name of the custom report, this name would be used as stream name | \ No newline at end of file diff --git a/docs/models/shared/SourceGoogleSearchConsoleCustomReportConfigValidEnums.md b/docs/models/shared/SourceGoogleSearchConsoleCustomReportConfigValidEnums.md new file mode 100755 index 000000000..ccae0d1d3 --- /dev/null +++ b/docs/models/shared/SourceGoogleSearchConsoleCustomReportConfigValidEnums.md @@ -0,0 +1,14 @@ +# SourceGoogleSearchConsoleCustomReportConfigValidEnums + +An enumeration of dimensions. 
+ + +## Values + +| Name | Value | +| --------- | --------- | +| `COUNTRY` | country | +| `DATE` | date | +| `DEVICE` | device | +| `PAGE` | page | +| `QUERY` | query | \ No newline at end of file diff --git a/docs/models/shared/SourceGoogleSearchConsoleDataFreshness.md b/docs/models/shared/SourceGoogleSearchConsoleDataFreshness.md new file mode 100755 index 000000000..676a18afe --- /dev/null +++ b/docs/models/shared/SourceGoogleSearchConsoleDataFreshness.md @@ -0,0 +1,11 @@ +# SourceGoogleSearchConsoleDataFreshness + +If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation. + + +## Values + +| Name | Value | +| -------- | -------- | +| `FINAL_` | final | +| `ALL` | all | \ No newline at end of file diff --git a/docs/models/shared/SourceGoogleSearchConsoleDataState.md b/docs/models/shared/SourceGoogleSearchConsoleDataState.md deleted file mode 100755 index 335f37713..000000000 --- a/docs/models/shared/SourceGoogleSearchConsoleDataState.md +++ /dev/null @@ -1,11 +0,0 @@ -# SourceGoogleSearchConsoleDataState - -If "final" or if this parameter is omitted, the returned data will include only finalized data. Setting this parameter to "all" should not be used with Incremental Sync mode as it may cause data loss. If "all", data will include fresh data. - - -## Values - -| Name | Value | -| -------- | -------- | -| `FINAL_` | final | -| `ALL` | all | \ No newline at end of file diff --git a/docs/models/shared/SourceGoogleSheets.md b/docs/models/shared/SourceGoogleSheets.md index e2ffcf7e7..ae7a8b1ac 100755 --- a/docs/models/shared/SourceGoogleSheets.md +++ b/docs/models/shared/SourceGoogleSheets.md @@ -9,6 +9,5 @@ The values required to configure the source. 
| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `credentials` | *Object* | :heavy_check_mark: | Credentials for connecting to the Google Sheets API | | | `namesConversion` | *Boolean* | :heavy_minus_sign: | Enables the conversion of column names to a standardized, SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this option if your destination is SQL-based. | | -| `rowBatchSize` | *Long* | :heavy_minus_sign: | The number of rows fetched when making a Google Sheet API call. Defaults to 200. | 50 | | `sourceType` | [SourceGoogleSheetsGoogleSheets](../../models/shared/SourceGoogleSheetsGoogleSheets.md) | :heavy_check_mark: | N/A | | | `spreadsheetId` | *String* | :heavy_check_mark: | Enter the link to the Google spreadsheet you want to sync. To copy the link, click the 'Share' button in the top-right corner of the spreadsheet, then click 'Copy link'. 
| https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG-arw2xy4HR3D-dwUb/edit | \ No newline at end of file diff --git a/docs/models/shared/SourceIntercom.md b/docs/models/shared/SourceIntercom.md index a6764ebec..e4e3ed768 100755 --- a/docs/models/shared/SourceIntercom.md +++ b/docs/models/shared/SourceIntercom.md @@ -8,5 +8,7 @@ The values required to configure the source. | Field | Type | Required | Description | Example | | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `accessToken` | *String* | :heavy_check_mark: | Access token for making authenticated requests. See the Intercom docs for more information. | | +| `clientId` | *String* | :heavy_minus_sign: | Client Id for your Intercom application. | | +| `clientSecret` | *String* | :heavy_minus_sign: | Client Secret for your Intercom application. 
| | | `sourceType` | [SourceIntercomIntercom](../../models/shared/SourceIntercomIntercom.md) | :heavy_check_mark: | N/A | | | `startDate` | [OffsetDateTime](https://docs.oracle.com/javase/8/docs/api/java/time/OffsetDateTime.html) | :heavy_check_mark: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. | 2020-11-16T00:00:00Z | \ No newline at end of file diff --git a/docs/models/shared/SourceLemlist.md b/docs/models/shared/SourceLemlist.md index de1dab91b..91e916ac6 100755 --- a/docs/models/shared/SourceLemlist.md +++ b/docs/models/shared/SourceLemlist.md @@ -7,5 +7,5 @@ The values required to configure the source. | Field | Type | Required | Description | | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | -| `apiKey` | *String* | :heavy_check_mark: | Lemlist API key. | +| `apiKey` | *String* | :heavy_check_mark: | Lemlist API key. | | `sourceType` | [SourceLemlistLemlist](../../models/shared/SourceLemlistLemlist.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/SourceLinkedinAds.md b/docs/models/shared/SourceLinkedinAds.md index fad7242d5..7a4b64a15 100755 --- a/docs/models/shared/SourceLinkedinAds.md +++ b/docs/models/shared/SourceLinkedinAds.md @@ -5,10 +5,10 @@ The values required to configure the source. 
## Fields -| Field | Type | Required | Description | Example | -| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `accountIds` | List<*Long*> | :heavy_minus_sign: | Specify the account IDs separated by a space, to pull the data from. Leave empty, if you want to pull the data from all associated accounts. See the LinkedIn Ads docs for more info. 
| | -| `adAnalyticsReports` | List<[SourceLinkedinAdsAdAnalyticsReportConfiguration](../../models/shared/SourceLinkedinAdsAdAnalyticsReportConfiguration.md)> | :heavy_minus_sign: | N/A | | -| `credentials` | *Object* | :heavy_minus_sign: | N/A | | -| `sourceType` | [SourceLinkedinAdsLinkedinAds](../../models/shared/SourceLinkedinAdsLinkedinAds.md) | :heavy_check_mark: | N/A | | -| `startDate` | [LocalDate](https://docs.oracle.com/javase/8/docs/api/java/time/LocalDate.html) | :heavy_check_mark: | UTC date in the format 2020-09-17. Any data before this date will not be replicated. | 2021-05-17 | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `accountIds` | List<*Long*> | :heavy_minus_sign: | Specify the account IDs to pull data from, separated by a space. Leave this field empty if you want to pull the data from all accounts accessible by the authenticated user. See the LinkedIn docs to locate these IDs. | | +| `adAnalyticsReports` | List<[SourceLinkedinAdsAdAnalyticsReportConfiguration](../../models/shared/SourceLinkedinAdsAdAnalyticsReportConfiguration.md)> | :heavy_minus_sign: | N/A | | +| `credentials` | *Object* | :heavy_minus_sign: | N/A | | +| `sourceType` | [SourceLinkedinAdsLinkedinAds](../../models/shared/SourceLinkedinAdsLinkedinAds.md) | :heavy_check_mark: | N/A | | +| `startDate` | [LocalDate](https://docs.oracle.com/javase/8/docs/api/java/time/LocalDate.html) | :heavy_check_mark: | UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. 
| 2021-05-17 | \ No newline at end of file diff --git a/docs/models/shared/SourceLinkedinAdsAdAnalyticsReportConfiguration.md b/docs/models/shared/SourceLinkedinAdsAdAnalyticsReportConfiguration.md index 931273cc0..5a234b962 100755 --- a/docs/models/shared/SourceLinkedinAdsAdAnalyticsReportConfiguration.md +++ b/docs/models/shared/SourceLinkedinAdsAdAnalyticsReportConfiguration.md @@ -5,8 +5,8 @@ Config for custom ad Analytics Report ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `name` | *String* | :heavy_check_mark: | The name for the report | -| `pivotBy` | [SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy](../../models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy.md) | :heavy_check_mark: | Select value from list to pivot by | -| `timeGranularity` | [SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity](../../models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity.md) | :heavy_check_mark: | Set time granularity for report:
ALL - Results grouped into a single result across the entire time range of the report.
DAILY - Results grouped by day.
MONTHLY - Results grouped by month.
YEARLY - Results grouped by year. | \ No newline at end of file +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `name` | *String* | :heavy_check_mark: | The name for the custom report. | +| `pivotBy` | [SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory](../../models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory.md) | :heavy_check_mark: | Choose a category to pivot your analytics report around. 
This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives. | +| `timeGranularity` | [SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity](../../models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity.md) | :heavy_check_mark: | Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods. | \ No newline at end of file diff --git a/docs/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy.md b/docs/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory.md similarity index 86% rename from docs/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy.md rename to docs/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory.md index adce9f542..fc313f2c4 100755 --- a/docs/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy.md +++ b/docs/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory.md @@ -1,6 +1,6 @@ -# SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy +# SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory -Select value from list to pivot by +Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives. ## Values diff --git a/docs/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity.md b/docs/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity.md index 667c5ce6c..c0f587900 100755 --- a/docs/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity.md +++ b/docs/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity.md @@ -1,10 +1,6 @@ # SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity -Set time granularity for report: -ALL - Results grouped into a single result across the entire time range of the report. -DAILY - Results grouped by day. -MONTHLY - Results grouped by month. -YEARLY - Results grouped by year. +Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods. ## Values diff --git a/docs/models/shared/SourceLinkedinAdsCredentialsAccessToken.md b/docs/models/shared/SourceLinkedinAdsCredentialsAccessToken.md index 49fd3d23a..3c6a1cfc6 100755 --- a/docs/models/shared/SourceLinkedinAdsCredentialsAccessToken.md +++ b/docs/models/shared/SourceLinkedinAdsCredentialsAccessToken.md @@ -3,7 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `accessToken` | *String* | :heavy_check_mark: | The token value generated using the authentication code. See the docs to obtain yours. 
| -| `authMethod` | [SourceLinkedinAdsCredentialsAccessTokenAuthMethod](../../models/shared/SourceLinkedinAdsCredentialsAccessTokenAuthMethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `accessToken` | *String* | :heavy_check_mark: | The access token generated for your developer application. Refer to our documentation for more information. 
| +| `authMethod` | [SourceLinkedinAdsCredentialsAccessTokenAuthMethod](../../models/shared/SourceLinkedinAdsCredentialsAccessTokenAuthMethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/shared/SourceLinkedinAdsCredentialsOAuth20.md b/docs/models/shared/SourceLinkedinAdsCredentialsOAuth20.md index 87db01ace..5d9ca12c7 100755 --- a/docs/models/shared/SourceLinkedinAdsCredentialsOAuth20.md +++ b/docs/models/shared/SourceLinkedinAdsCredentialsOAuth20.md @@ -3,9 +3,9 @@ ## Fields -| Field | Type | Required | Description | -| --------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | -| `authMethod` | [SourceLinkedinAdsCredentialsOAuth20AuthMethod](../../models/shared/SourceLinkedinAdsCredentialsOAuth20AuthMethod.md) | :heavy_minus_sign: | N/A | -| `clientId` | *String* | :heavy_check_mark: | The client ID of the LinkedIn Ads developer application. | -| `clientSecret` | *String* | :heavy_check_mark: | The client secret the LinkedIn Ads developer application. | -| `refreshToken` | *String* | :heavy_check_mark: | The key to refresh the expired access token. 
| \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `authMethod` | [SourceLinkedinAdsCredentialsOAuth20AuthMethod](../../models/shared/SourceLinkedinAdsCredentialsOAuth20AuthMethod.md) | :heavy_minus_sign: | N/A | +| `clientId` | *String* | :heavy_check_mark: | The client ID of your developer application. Refer to our documentation for more information. | +| `clientSecret` | *String* | :heavy_check_mark: | The client secret of your developer application. Refer to our documentation for more information. | +| `refreshToken` | *String* | :heavy_check_mark: | The key to refresh the expired access token. Refer to our documentation for more information. | \ No newline at end of file diff --git a/docs/models/shared/SourceMssql.md b/docs/models/shared/SourceMssql.md index e65a4b6f2..7fb9ccb2a 100755 --- a/docs/models/shared/SourceMssql.md +++ b/docs/models/shared/SourceMssql.md @@ -5,16 +5,16 @@ The values required to configure the source. 
## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `database` | *String* | :heavy_check_mark: | The name of the database. | master | -| `host` | *String* | :heavy_check_mark: | The hostname of the database. | | -| `jdbcUrlParams` | *String* | :heavy_minus_sign: | Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). 
| | -| `password` | *String* | :heavy_minus_sign: | The password associated with the username. | | -| `port` | *Long* | :heavy_check_mark: | The port of the database. | 1433 | -| `replicationMethod` | *Object* | :heavy_minus_sign: | The replication method used for extracting data from the database. STANDARD replication requires no setup on the DB side but will not be able to represent deletions incrementally. CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself. | | -| `schemas` | List<*String*> | :heavy_minus_sign: | The list of schemas to sync from. Defaults to user. Case sensitive. | | -| `sourceType` | [SourceMssqlMssql](../../models/shared/SourceMssqlMssql.md) | :heavy_check_mark: | N/A | | -| `sslMethod` | *Object* | :heavy_minus_sign: | The encryption method which is used when communicating with the database. | | -| `tunnelMethod` | *Object* | :heavy_minus_sign: | Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. | | -| `username` | *String* | :heavy_check_mark: | The username which is used to access the database. 
| | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `database` | *String* | :heavy_check_mark: | The name of the database. | master | +| `host` | *String* | :heavy_check_mark: | The hostname of the database. | | +| `jdbcUrlParams` | *String* | :heavy_minus_sign: | Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). | | +| `password` | *String* | :heavy_minus_sign: | The password associated with the username. | | +| `port` | *Long* | :heavy_check_mark: | The port of the database. | 1433 | +| `replicationMethod` | *Object* | :heavy_minus_sign: | Configures how data is extracted from the database. | | +| `schemas` | List<*String*> | :heavy_minus_sign: | The list of schemas to sync from. Defaults to user. Case sensitive. 
| | +| `sourceType` | [SourceMssqlMssql](../../models/shared/SourceMssqlMssql.md) | :heavy_check_mark: | N/A | | +| `sslMethod` | *Object* | :heavy_minus_sign: | The encryption method which is used when communicating with the database. | | +| `tunnelMethod` | *Object* | :heavy_minus_sign: | Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. | | +| `username` | *String* | :heavy_check_mark: | The username which is used to access the database. | | \ No newline at end of file diff --git a/docs/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCMethod.md b/docs/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCMethod.md deleted file mode 100755 index c6d3e469d..000000000 --- a/docs/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCMethod.md +++ /dev/null @@ -1,8 +0,0 @@ -# SourceMssqlReplicationMethodLogicalReplicationCDCMethod - - -## Values - -| Name | Value | -| ----- | ----- | -| `CDC` | CDC | \ No newline at end of file diff --git a/docs/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDC.md b/docs/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC.md similarity index 89% rename from docs/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDC.md rename to docs/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC.md index fafad5566..ae778f501 100755 --- a/docs/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDC.md +++ b/docs/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC.md @@ -1,13 +1,13 @@ -# SourceMssqlReplicationMethodLogicalReplicationCDC +# SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC -CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself. +Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. 
This must be enabled on your database. ## Fields | Field | Type | Required | Description | | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `dataToSync` | [SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync](../../models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync.md) | :heavy_minus_sign: | What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. 
"New Changes Only" will skip the initial snapshot, and only sync new changes through CDC. | +| `dataToSync` | [SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync](../../models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync.md) | :heavy_minus_sign: | What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC. | | `initialWaitingSeconds` | *Long* | :heavy_minus_sign: | The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time. | -| `method` | [SourceMssqlReplicationMethodLogicalReplicationCDCMethod](../../models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCMethod.md) | :heavy_check_mark: | N/A | -| `snapshotIsolation` | [SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel](../../models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel.md) | :heavy_minus_sign: | Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database. 
| \ No newline at end of file +| `method` | [SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod](../../models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod.md) | :heavy_check_mark: | N/A | +| `snapshotIsolation` | [SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel](../../models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel.md) | :heavy_minus_sign: | Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database. | \ No newline at end of file diff --git a/docs/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync.md b/docs/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync.md similarity index 84% rename from docs/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync.md rename to docs/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync.md index 5713db7c0..f0b4bdc4f 100755 --- a/docs/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync.md +++ b/docs/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync.md @@ -1,4 +1,4 @@ -# SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync +# SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC. 
diff --git a/docs/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel.md b/docs/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel.md similarity index 85% rename from docs/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel.md rename to docs/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel.md index 82da06669..75df12f8b 100755 --- a/docs/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel.md +++ b/docs/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel.md @@ -1,4 +1,4 @@ -# SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel +# SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database. 
diff --git a/docs/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod.md b/docs/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod.md new file mode 100755 index 000000000..0c8da9bb9 --- /dev/null +++ b/docs/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod.md @@ -0,0 +1,8 @@ +# SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod + + +## Values + +| Name | Value | +| ----- | ----- | +| `CDC` | CDC | \ No newline at end of file diff --git a/docs/models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursor.md b/docs/models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursor.md new file mode 100755 index 000000000..010d02eee --- /dev/null +++ b/docs/models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursor.md @@ -0,0 +1,10 @@ +# SourceMssqlReplicationMethodScanChangesWithUserDefinedCursor + +Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). 
+ + +## Fields + +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `method` | [SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod](../../models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/SourceMssqlReplicationMethodStandardMethod.md b/docs/models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod.md similarity index 58% rename from docs/models/shared/SourceMssqlReplicationMethodStandardMethod.md rename to docs/models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod.md index 6eeb8cbd5..a3caebb95 100755 --- a/docs/models/shared/SourceMssqlReplicationMethodStandardMethod.md +++ b/docs/models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod.md @@ -1,4 +1,4 @@ -# SourceMssqlReplicationMethodStandardMethod +# SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod ## Values diff --git a/docs/models/shared/SourceMssqlReplicationMethodStandard.md b/docs/models/shared/SourceMssqlReplicationMethodStandard.md deleted file mode 100755 index 1d2407fe5..000000000 --- a/docs/models/shared/SourceMssqlReplicationMethodStandard.md +++ /dev/null @@ -1,10 +0,0 @@ -# SourceMssqlReplicationMethodStandard - -Standard replication 
requires no setup on the DB side but will not be able to represent deletions incrementally. - - -## Fields - -| Field | Type | Required | Description | -| --------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------- | -| `method` | [SourceMssqlReplicationMethodStandardMethod](../../models/shared/SourceMssqlReplicationMethodStandardMethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/SourceOpenweatherLanguage.md b/docs/models/shared/SourceOpenweatherLanguage.md deleted file mode 100755 index 767cd068d..000000000 --- a/docs/models/shared/SourceOpenweatherLanguage.md +++ /dev/null @@ -1,58 +0,0 @@ -# SourceOpenweatherLanguage - -You can use lang parameter to get the output in your language. The contents of the description field will be translated. See here for the list of supported languages. 
- - -## Values - -| Name | Value | -| ------- | ------- | -| `AF` | af | -| `AL` | al | -| `AR` | ar | -| `AZ` | az | -| `BG` | bg | -| `CA` | ca | -| `CZ` | cz | -| `DA` | da | -| `DE` | de | -| `EL` | el | -| `EN` | en | -| `EU` | eu | -| `FA` | fa | -| `FI` | fi | -| `FR` | fr | -| `GL` | gl | -| `HE` | he | -| `HI` | hi | -| `HR` | hr | -| `HU` | hu | -| `ID` | id | -| `IT` | it | -| `JA` | ja | -| `KR` | kr | -| `LA` | la | -| `LT` | lt | -| `MK` | mk | -| `NO` | no | -| `NL` | nl | -| `PL` | pl | -| `PT` | pt | -| `PT_BR` | pt_br | -| `RO` | ro | -| `RU` | ru | -| `SV` | sv | -| `SE` | se | -| `SK` | sk | -| `SL` | sl | -| `SP` | sp | -| `ES` | es | -| `SR` | sr | -| `TH` | th | -| `TR` | tr | -| `UA` | ua | -| `UK` | uk | -| `VI` | vi | -| `ZH_CN` | zh_cn | -| `ZH_TW` | zh_tw | -| `ZU` | zu | \ No newline at end of file diff --git a/docs/models/shared/SourceOpenweatherOpenweather.md b/docs/models/shared/SourceOpenweatherOpenweather.md deleted file mode 100755 index 0e1601ea0..000000000 --- a/docs/models/shared/SourceOpenweatherOpenweather.md +++ /dev/null @@ -1,8 +0,0 @@ -# SourceOpenweatherOpenweather - - -## Values - -| Name | Value | -| ------------- | ------------- | -| `OPENWEATHER` | openweather | \ No newline at end of file diff --git a/docs/models/shared/SourceOpenweatherUnits.md b/docs/models/shared/SourceOpenweatherUnits.md deleted file mode 100755 index d9509eca6..000000000 --- a/docs/models/shared/SourceOpenweatherUnits.md +++ /dev/null @@ -1,12 +0,0 @@ -# SourceOpenweatherUnits - -Units of measurement. standard, metric and imperial units are available. If you do not use the units parameter, standard units will be applied by default. 
- - -## Values - -| Name | Value | -| ---------- | ---------- | -| `STANDARD` | standard | -| `METRIC` | metric | -| `IMPERIAL` | imperial | \ No newline at end of file diff --git a/docs/models/shared/SourcePostgres.md b/docs/models/shared/SourcePostgres.md index 29bdcf179..692c411cb 100755 --- a/docs/models/shared/SourcePostgres.md +++ b/docs/models/shared/SourcePostgres.md @@ -12,7 +12,7 @@ The values required to configure the source. | `jdbcUrlParams` | *String* | :heavy_minus_sign: | Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters. | | | `password` | *String* | :heavy_minus_sign: | Password associated with the username. | | | `port` | *Long* | :heavy_check_mark: | Port of the database. | 5432 | -| `replicationMethod` | *Object* | :heavy_minus_sign: | Replication method for extracting data from the database. | | +| `replicationMethod` | *Object* | :heavy_minus_sign: | Configures how data is extracted from the database. | | | `schemas` | List<*String*> | :heavy_minus_sign: | The list of schemas (case sensitive) to sync from. Defaults to public. | | | `sourceType` | [SourcePostgresPostgres](../../models/shared/SourcePostgresPostgres.md) | :heavy_check_mark: | N/A | | | `sslMode` | *Object* | :heavy_minus_sign: | SSL connection modes.
Read more in the docs. | | diff --git a/docs/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumn.md b/docs/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumn.md new file mode 100755 index 000000000..1244796e4 --- /dev/null +++ b/docs/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumn.md @@ -0,0 +1,10 @@ +# SourcePostgresReplicationMethodDetectChangesWithXminSystemColumn + +Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB. + + +## Fields + +| Field | Type | Required | Description | +| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `method` | [SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod](../../models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod.md b/docs/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod.md new file mode 100755 index 000000000..b9dfd6382 --- /dev/null +++ b/docs/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod.md @@ -0,0 +1,8 @@ +# 
SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod + + +## Values + +| Name | Value | +| ------ | ------ | +| `XMIN` | Xmin | \ No newline at end of file diff --git a/docs/models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursor.md b/docs/models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursor.md new file mode 100755 index 000000000..5764ba39d --- /dev/null +++ b/docs/models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursor.md @@ -0,0 +1,10 @@ +# SourcePostgresReplicationMethodScanChangesWithUserDefinedCursor + +Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). + + +## Fields + +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `method` | [SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod](../../models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/SourcePostgresReplicationMethodStandardMethod.md b/docs/models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod.md similarity index 57% rename from docs/models/shared/SourcePostgresReplicationMethodStandardMethod.md rename to 
docs/models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod.md index be38ee027..a36c799d6 100755 --- a/docs/models/shared/SourcePostgresReplicationMethodStandardMethod.md +++ b/docs/models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod.md @@ -1,4 +1,4 @@ -# SourcePostgresReplicationMethodStandardMethod +# SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod ## Values diff --git a/docs/models/shared/SourcePostgresReplicationMethodStandard.md b/docs/models/shared/SourcePostgresReplicationMethodStandard.md deleted file mode 100755 index d9d1124cf..000000000 --- a/docs/models/shared/SourcePostgresReplicationMethodStandard.md +++ /dev/null @@ -1,10 +0,0 @@ -# SourcePostgresReplicationMethodStandard - -Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. - - -## Fields - -| Field | Type | Required | Description | -| --------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | -| `method` | [SourcePostgresReplicationMethodStandardMethod](../../models/shared/SourcePostgresReplicationMethodStandardMethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/SourcePostgresReplicationMethodStandardXminMethod.md b/docs/models/shared/SourcePostgresReplicationMethodStandardXminMethod.md deleted file mode 100755 index e4c428f50..000000000 --- a/docs/models/shared/SourcePostgresReplicationMethodStandardXminMethod.md +++ /dev/null @@ -1,8 +0,0 @@ -# SourcePostgresReplicationMethodStandardXminMethod 
- - -## Values - -| Name | Value | -| ------ | ------ | -| `XMIN` | Xmin | \ No newline at end of file diff --git a/docs/models/shared/SourcePosthog.md b/docs/models/shared/SourcePosthog.md index c0a438257..fab2e4260 100755 --- a/docs/models/shared/SourcePosthog.md +++ b/docs/models/shared/SourcePosthog.md @@ -9,5 +9,6 @@ The values required to configure the source. | -------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------- | | `apiKey` | *String* | :heavy_check_mark: | API Key. See the docs for information on how to generate this key. | | | `baseUrl` | *String* | :heavy_minus_sign: | Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com). | https://posthog.example.com | +| `eventsTimeStep` | *Long* | :heavy_minus_sign: | Set lower value in case of failing long running sync of events stream. | 30 | | `sourceType` | [SourcePosthogPosthog](../../models/shared/SourcePosthogPosthog.md) | :heavy_check_mark: | N/A | | | `startDate` | [OffsetDateTime](https://docs.oracle.com/javase/8/docs/api/java/time/OffsetDateTime.html) | :heavy_check_mark: | The date from which you'd like to replicate the data. Any data before this date will not be replicated. 
| 2021-01-01T00:00:00Z | \ No newline at end of file diff --git a/docs/models/shared/SourcePublicApis.md b/docs/models/shared/SourcePublicApis.md deleted file mode 100755 index ddc964012..000000000 --- a/docs/models/shared/SourcePublicApis.md +++ /dev/null @@ -1,10 +0,0 @@ -# SourcePublicApis - -The values required to configure the source. - - -## Fields - -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | -| `sourceType` | [SourcePublicApisPublicApis](../../models/shared/SourcePublicApisPublicApis.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/shared/SourcePublicApisPublicApis.md b/docs/models/shared/SourcePublicApisPublicApis.md deleted file mode 100755 index 66acd1b03..000000000 --- a/docs/models/shared/SourcePublicApisPublicApis.md +++ /dev/null @@ -1,8 +0,0 @@ -# SourcePublicApisPublicApis - - -## Values - -| Name | Value | -| ------------- | ------------- | -| `PUBLIC_APIS` | public-apis | \ No newline at end of file diff --git a/docs/models/shared/SourceS3.md b/docs/models/shared/SourceS3.md index 71c38e3fb..7e0351154 100755 --- a/docs/models/shared/SourceS3.md +++ b/docs/models/shared/SourceS3.md @@ -1,15 +1,22 @@ # SourceS3 -The values required to configure the source. +NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes +because it is responsible for converting legacy S3 v3 configs into v4 configs using the File-Based CDK. 
## Fields -| Field | Type | Required | Description | Example | -| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `dataset` | *String* | 
:heavy_check_mark: | The name of the stream you would like this source to output. Can contain letters, numbers, or underscores. | | -| `format` | *Object* | :heavy_minus_sign: | The format of the files you'd like to replicate | | -| `pathPattern` | *String* | :heavy_check_mark: | A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use \| to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files. | ** | -| `provider` | [SourceS3S3AmazonWebServices](../../models/shared/SourceS3S3AmazonWebServices.md) | :heavy_check_mark: | Use this to load files from S3 or S3-compatible services | | -| `schema` | *String* | :heavy_minus_sign: | Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema. | {"column_1": "number", "column_2": "string", "column_3": "array", "column_4": "object", "column_5": "boolean"} | -| `sourceType` | [SourceS3S3](../../models/shared/SourceS3S3.md) | :heavy_check_mark: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `awsAccessKeyId` | *String* | :heavy_minus_sign: | In order 
to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. | | +| `awsSecretAccessKey` | *String* | :heavy_minus_sign: | In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. | | +| `bucket` | *String* | :heavy_check_mark: | Name of the S3 bucket where the file(s) exist. | | +| `dataset` | *String* | :heavy_minus_sign: | Deprecated and will be removed soon. Please do not use this field anymore and use streams.name instead. The name of the stream you would like this source to output. Can contain letters, numbers, or underscores. | | +| `endpoint` | *String* | :heavy_minus_sign: | Endpoint to an S3 compatible service. Leave empty to use AWS. | | +| `format` | *Object* | :heavy_minus_sign: | Deprecated and will be removed soon. Please do not use this field anymore and use streams.format instead. The format of the files you'd like to replicate | | +| `pathPattern` | *String* | :heavy_minus_sign: | Deprecated and will be removed soon. Please do not use this field anymore and use streams.globs instead. A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use \| to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files. | ** | +| `provider` | [SourceS3S3AmazonWebServices](../../models/shared/SourceS3S3AmazonWebServices.md) | :heavy_minus_sign: | Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services | | +| `schema` | *String* | :heavy_minus_sign: | Deprecated and will be removed soon. 
Please do not use this field anymore and use streams.input_schema instead. Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema. | {"column_1": "number", "column_2": "string", "column_3": "array", "column_4": "object", "column_5": "boolean"} | +| `sourceType` | [SourceS3S3](../../models/shared/SourceS3S3.md) | :heavy_check_mark: | N/A | | +| `startDate` | [OffsetDateTime](https://docs.oracle.com/javase/8/docs/api/java/time/OffsetDateTime.html) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated. | 2021-01-01T00:00:00.000000Z | +| `streams` | List<[SourceS3FileBasedStreamConfig](../../models/shared/SourceS3FileBasedStreamConfig.md)> | :heavy_check_mark: | Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table. | | \ No newline at end of file diff --git a/docs/models/shared/SourceOpenweather.md b/docs/models/shared/SourceS3FileBasedStreamConfig.md similarity index 52% rename from docs/models/shared/SourceOpenweather.md rename to docs/models/shared/SourceS3FileBasedStreamConfig.md index c0d99d5c2..a99f79597 100755 --- a/docs/models/shared/SourceOpenweather.md +++ b/docs/models/shared/SourceS3FileBasedStreamConfig.md @@ -1,15 +1,17 @@ -# SourceOpenweather - -The values required to configure the source. 
+# SourceS3FileBasedStreamConfig ## Fields -| Field | Type | Required | Description | Example | -| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `appid` | *String* | :heavy_check_mark: | Your OpenWeather API Key. See here. The key is case sensitive. | | -| `lang` | [SourceOpenweatherLanguage](../../models/shared/SourceOpenweatherLanguage.md) | :heavy_minus_sign: | You can use lang parameter to get the output in your language. The contents of the description field will be translated. See here for the list of supported languages. | en | -| `lat` | *String* | :heavy_check_mark: | Latitude for which you want to get weather condition from. (min -90, max 90) | 45.7603 | -| `lon` | *String* | :heavy_check_mark: | Longitude for which you want to get weather condition from. 
(min -180, max 180) | 4.835659 | -| `sourceType` | [SourceOpenweatherOpenweather](../../models/shared/SourceOpenweatherOpenweather.md) | :heavy_check_mark: | N/A | | -| `units` | [SourceOpenweatherUnits](../../models/shared/SourceOpenweatherUnits.md) | :heavy_minus_sign: | Units of measurement. standard, metric and imperial units are available. If you do not use the units parameter, standard units will be applied by default. | standard | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `daysToSyncIfHistoryIsFull` | *Long* | :heavy_minus_sign: | When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. | +| `fileType` | *String* | :heavy_check_mark: | The data file type that is being extracted for a stream. | +| `format` | *Object* | :heavy_minus_sign: | The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. | +| `globs` | List<*String*> | :heavy_minus_sign: | The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here. 
| +| `inputSchema` | *String* | :heavy_minus_sign: | The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files. | +| `legacyPrefix` | *String* | :heavy_minus_sign: | The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob. | +| `name` | *String* | :heavy_check_mark: | The name of the stream. | +| `primaryKey` | *String* | :heavy_minus_sign: | The column or columns (for a composite key) that serves as the unique identifier of a record. | +| `schemaless` | *Boolean* | :heavy_minus_sign: | When enabled, syncs will not validate or structure records against the stream's schema. | +| `validationPolicy` | [SourceS3FileBasedStreamConfigValidationPolicy](../../models/shared/SourceS3FileBasedStreamConfigValidationPolicy.md) | :heavy_minus_sign: | The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. | \ No newline at end of file diff --git a/docs/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormat.md b/docs/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormat.md new file mode 100755 index 000000000..2a2c89b34 --- /dev/null +++ b/docs/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormat.md @@ -0,0 +1,11 @@ +# SourceS3FileBasedStreamConfigFormatAvroFormat + +The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. 
+ + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `doubleAsString` | *Boolean* | :heavy_minus_sign: | Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers. 
| +| `filetype` | [SourceS3FileBasedStreamConfigFormatAvroFormatFiletype](../../models/shared/SourceS3FileBasedStreamConfigFormatAvroFormatFiletype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormatFiletype.md b/docs/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormatFiletype.md new file mode 100755 index 000000000..cd8f1251c --- /dev/null +++ b/docs/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormatFiletype.md @@ -0,0 +1,8 @@ +# SourceS3FileBasedStreamConfigFormatAvroFormatFiletype + + +## Values + +| Name | Value | +| ------ | ------ | +| `AVRO` | avro | \ No newline at end of file diff --git a/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormat.md b/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormat.md new file mode 100755 index 000000000..bb55b1d82 --- /dev/null +++ b/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormat.md @@ -0,0 +1,23 @@ +# SourceS3FileBasedStreamConfigFormatCSVFormat + +The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. 
+ + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `delimiter` | *String* | :heavy_minus_sign: | The character delimiting individual cells in the CSV data. This may only be a 1-character string. 
For tab-delimited data enter '\t'. | +| `doubleQuote` | *Boolean* | :heavy_minus_sign: | Whether two quotes in a quoted CSV value denote a single quote in the data. | +| `encoding` | *String* | :heavy_minus_sign: | The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options. | +| `escapeChar` | *String* | :heavy_minus_sign: | The character used for escaping special characters. To disallow escaping, leave this field blank. | +| `falseValues` | List<*String*> | :heavy_minus_sign: | A set of case-sensitive strings that should be interpreted as false values. | +| `filetype` | [SourceS3FileBasedStreamConfigFormatCSVFormatFiletype](../../models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatFiletype.md) | :heavy_minus_sign: | N/A | +| `headerDefinition` | *Object* | :heavy_minus_sign: | How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. | +| `inferenceType` | [SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType](../../models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType.md) | :heavy_minus_sign: | How to infer the types of the columns. If none, inference defaults to strings. | +| `nullValues` | List<*String*> | :heavy_minus_sign: | A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field. | +| `quoteChar` | *String* | :heavy_minus_sign: | The character used for quoting CSV values. To disallow quoting, make this field blank. 
| +| `skipRowsAfterHeader` | *Long* | :heavy_minus_sign: | The number of rows to skip after the header row. | +| `skipRowsBeforeHeader` | *Long* | :heavy_minus_sign: | The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. | +| `stringsCanBeNull` | *Boolean* | :heavy_minus_sign: | Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. | +| `trueValues` | List<*String*> | :heavy_minus_sign: | A set of case-sensitive strings that should be interpreted as true values. | \ No newline at end of file diff --git a/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatFiletype.md b/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatFiletype.md new file mode 100755 index 000000000..27f0873a3 --- /dev/null +++ b/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatFiletype.md @@ -0,0 +1,8 @@ +# SourceS3FileBasedStreamConfigFormatCSVFormatFiletype + + +## Values + +| Name | Value | +| ----- | ----- | +| `CSV` | csv | \ No newline at end of file diff --git a/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogenerated.md b/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogenerated.md new file mode 100755 index 000000000..267111647 --- /dev/null +++ b/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogenerated.md @@ -0,0 +1,10 @@ +# SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogenerated + +How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. 
Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. + + +## Fields + +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `headerDefinitionType` | [SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType](../../models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType.md b/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType.md new file mode 100755 index 000000000..681ac2214 --- /dev/null +++ b/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType.md @@ -0,0 +1,8 @@ +# SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType + + +## Values + +| Name | Value | +| --------------- | 
--------------- | +| `AUTOGENERATED` | Autogenerated | \ No newline at end of file diff --git a/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSV.md b/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSV.md new file mode 100755 index 000000000..3089a2f1a --- /dev/null +++ b/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSV.md @@ -0,0 +1,10 @@ +# SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSV + +How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. + + +## Fields + +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `headerDefinitionType` | 
[SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType](../../models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType.md b/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType.md new file mode 100755 index 000000000..49303bbe5 --- /dev/null +++ b/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType.md @@ -0,0 +1,8 @@ +# SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `FROM_CSV` | From CSV | \ No newline at end of file diff --git a/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvided.md b/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvided.md new file mode 100755 index 000000000..a34deb1fe --- /dev/null +++ b/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvided.md @@ -0,0 +1,11 @@ +# SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvided + +How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. 
+ + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `columnNames` | List<*String*> | :heavy_check_mark: | The column names that will be used while emitting the CSV records | +| `headerDefinitionType` | [SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType](../../models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType.md b/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType.md new file mode 100755 index 000000000..05ca20b7d --- /dev/null +++ b/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType.md @@ -0,0 +1,8 @@ +# SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType + + +## Values + +| Name | Value | +| --------------- | --------------- | +| `USER_PROVIDED` | User Provided | \ No 
newline at end of file diff --git a/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType.md b/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType.md new file mode 100755 index 000000000..9007d85fb --- /dev/null +++ b/docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType.md @@ -0,0 +1,11 @@ +# SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType + +How to infer the types of the columns. If none, inference default to strings. + + +## Values + +| Name | Value | +| ---------------------- | ---------------------- | +| `NONE` | None | +| `PRIMITIVE_TYPES_ONLY` | Primitive Types Only | \ No newline at end of file diff --git a/docs/models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormat.md b/docs/models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormat.md new file mode 100755 index 000000000..1c1fdd786 --- /dev/null +++ b/docs/models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormat.md @@ -0,0 +1,10 @@ +# SourceS3FileBasedStreamConfigFormatJsonlFormat + +The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. 
+ + +## Fields + +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- | +| `filetype` | [SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype](../../models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype.md b/docs/models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype.md new file mode 100755 index 000000000..43ae2c232 --- /dev/null +++ b/docs/models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype.md @@ -0,0 +1,8 @@ +# SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype + + +## Values + +| Name | Value | +| ------- | ------- | +| `JSONL` | jsonl | \ No newline at end of file diff --git a/docs/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormat.md b/docs/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormat.md new file mode 100755 index 000000000..daa39ea28 --- /dev/null +++ b/docs/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormat.md @@ -0,0 +1,11 @@ +# SourceS3FileBasedStreamConfigFormatParquetFormat + +The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. 
+ + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------- | +| `decimalAsFloat` | *Boolean* | :heavy_minus_sign: | Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. | +| `filetype` | [SourceS3FileBasedStreamConfigFormatParquetFormatFiletype](../../models/shared/SourceS3FileBasedStreamConfigFormatParquetFormatFiletype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormatFiletype.md b/docs/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormatFiletype.md new file mode 100755 index 000000000..77b8b365d --- /dev/null +++ b/docs/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormatFiletype.md @@ -0,0 +1,8 @@ +# SourceS3FileBasedStreamConfigFormatParquetFormatFiletype + + +## Values + +| Name | Value | +| --------- | --------- | +| `PARQUET` | parquet | \ No newline at end of file diff --git a/docs/models/shared/SourceS3FileBasedStreamConfigValidationPolicy.md b/docs/models/shared/SourceS3FileBasedStreamConfigValidationPolicy.md new file mode 100755 index 000000000..55cef1419 --- /dev/null +++ b/docs/models/shared/SourceS3FileBasedStreamConfigValidationPolicy.md @@ -0,0 +1,12 @@ +# SourceS3FileBasedStreamConfigValidationPolicy + +The name of the validation policy that dictates sync behavior when a record does not adhere 
to the stream schema. + + +## Values + +| Name | Value | +| ------------------- | ------------------- | +| `EMIT_RECORD` | Emit Record | +| `SKIP_RECORD` | Skip Record | +| `WAIT_FOR_DISCOVER` | Wait for Discover | \ No newline at end of file diff --git a/docs/models/shared/SourceS3S3AmazonWebServices.md b/docs/models/shared/SourceS3S3AmazonWebServices.md index 142369fa2..5f4e386d3 100755 --- a/docs/models/shared/SourceS3S3AmazonWebServices.md +++ b/docs/models/shared/SourceS3S3AmazonWebServices.md @@ -1,6 +1,6 @@ # SourceS3S3AmazonWebServices -Use this to load files from S3 or S3-compatible services +Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services ## Fields @@ -9,7 +9,7 @@ Use this to load files from S3 or S3-compatible services | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | | `awsAccessKeyId` | *String* | :heavy_minus_sign: | In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. | | | `awsSecretAccessKey` | *String* | :heavy_minus_sign: | In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. | | -| `bucket` | *String* | :heavy_check_mark: | Name of the S3 bucket where the file(s) exist. | | +| `bucket` | *String* | :heavy_minus_sign: | Name of the S3 bucket where the file(s) exist. | | | `endpoint` | *String* | :heavy_minus_sign: | Endpoint to an S3 compatible service. Leave empty to use AWS. | | | `pathPrefix` | *String* | :heavy_minus_sign: | By providing a path-like prefix (e.g. myFolder/thisTable/) under which all the relevant files sit, we can optimize finding these in S3. This is optional but recommended if your bucket contains many folders/files which you don't need to replicate. | | | `startDate` | [OffsetDateTime](https://docs.oracle.com/javase/8/docs/api/java/time/OffsetDateTime.html) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Any file modified before this date will not be replicated. | 2021-01-01T00:00:00Z | \ No newline at end of file diff --git a/docs/models/shared/SourceShopify.md b/docs/models/shared/SourceShopify.md index 74efb2b8c..d3325d0c3 100755 --- a/docs/models/shared/SourceShopify.md +++ b/docs/models/shared/SourceShopify.md @@ -10,4 +10,4 @@ The values required to configure the source. 
| `credentials` | *Object* | :heavy_minus_sign: | The authorization method to use to retrieve data from Shopify | | | `shop` | *String* | :heavy_check_mark: | The name of your Shopify store found in the URL. For example, if your URL was https://NAME.myshopify.com, then the name would be 'NAME' or 'NAME.myshopify.com'. | my-store | | `sourceType` | [SourceShopifyShopify](../../models/shared/SourceShopifyShopify.md) | :heavy_check_mark: | N/A | | -| `startDate` | [LocalDate](https://docs.oracle.com/javase/8/docs/api/java/time/LocalDate.html) | :heavy_check_mark: | The date you would like to replicate data from. Format: YYYY-MM-DD. Any data before this date will not be replicated. | 2021-01-01 | \ No newline at end of file +| `startDate` | [LocalDate](https://docs.oracle.com/javase/8/docs/api/java/time/LocalDate.html) | :heavy_minus_sign: | The date you would like to replicate data from. Format: YYYY-MM-DD. Any data before this date will not be replicated. | | \ No newline at end of file diff --git a/docs/models/shared/SourceStripe.md b/docs/models/shared/SourceStripe.md index 4a7e21999..641eeed3e 100755 --- a/docs/models/shared/SourceStripe.md +++ b/docs/models/shared/SourceStripe.md @@ -5,11 +5,11 @@ The values required to configure the source. 
## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `accountId` | *String* | :heavy_check_mark: | Your Stripe account ID (starts with 'acct_', find yours here). | | -| `clientSecret` | *String* | :heavy_check_mark: | Stripe API key (usually starts with 'sk_live_'; find yours here). | | -| `lookbackWindowDays` | *Long* | :heavy_minus_sign: | When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. 
More info here | | -| `sliceRange` | *Long* | :heavy_minus_sign: | The time increment used by the connector when requesting data from the Stripe API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted. | 1 | -| `sourceType` | [SourceStripeStripe](../../models/shared/SourceStripeStripe.md) | :heavy_check_mark: | N/A | | -| `startDate` | [OffsetDateTime](https://docs.oracle.com/javase/8/docs/api/java/time/OffsetDateTime.html) | :heavy_check_mark: | UTC date and time in the format 2017-01-25T00:00:00Z. Only data generated after this date will be replicated. | 2017-01-25T00:00:00Z | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `accountId` | *String* | :heavy_check_mark: | Your Stripe account ID (starts with 'acct_', find yours here). | | +| `clientSecret` | *String* | :heavy_check_mark: | Stripe API key (usually starts with 'sk_live_'; find yours here). | | +| `lookbackWindowDays` | *Long* | :heavy_minus_sign: | When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. Applies only to streams that do not support event-based incremental syncs: CheckoutSessionLineItems, Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks. 
More info here | | +| `sliceRange` | *Long* | :heavy_minus_sign: | The time increment used by the connector when requesting data from the Stripe API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted. | 1 | +| `sourceType` | [SourceStripeStripe](../../models/shared/SourceStripeStripe.md) | :heavy_check_mark: | N/A | | +| `startDate` | [OffsetDateTime](https://docs.oracle.com/javase/8/docs/api/java/time/OffsetDateTime.html) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Only data generated after this date will be replicated. | 2017-01-25T00:00:00Z | \ No newline at end of file diff --git a/docs/models/shared/SourceZendeskSunshine.md b/docs/models/shared/SourceZendeskSunshine.md index 23674244c..7b6e426e9 100755 --- a/docs/models/shared/SourceZendeskSunshine.md +++ b/docs/models/shared/SourceZendeskSunshine.md @@ -9,5 +9,5 @@ The values required to configure the source. | -------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- | | `credentials` | *Object* | :heavy_minus_sign: | N/A | | | `sourceType` | [SourceZendeskSunshineZendeskSunshine](../../models/shared/SourceZendeskSunshineZendeskSunshine.md) | :heavy_check_mark: | N/A | | -| `startDate` | *String* | :heavy_check_mark: | The date from which you'd like to replicate data for Zendesk Sunshine API, in the format YYYY-MM-DDT00:00:00Z. 
| 2021-01-01T00:00:00Z | +| `startDate` | [OffsetDateTime](https://docs.oracle.com/javase/8/docs/api/java/time/OffsetDateTime.html) | :heavy_check_mark: | The date from which you'd like to replicate data for Zendesk Sunshine API, in the format YYYY-MM-DDT00:00:00Z. | 2021-01-01T00:00:00Z | | `subdomain` | *String* | :heavy_check_mark: | The subdomain for your Zendesk Account. | | \ No newline at end of file diff --git a/docs/models/shared/SourceZendeskSunshineCredentialsAPIToken.md b/docs/models/shared/SourceZendeskSunshineCredentialsAPIToken.md new file mode 100755 index 000000000..1c5f55dd2 --- /dev/null +++ b/docs/models/shared/SourceZendeskSunshineCredentialsAPIToken.md @@ -0,0 +1,10 @@ +# SourceZendeskSunshineCredentialsAPIToken + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | +| `apiToken` | *String* | :heavy_check_mark: | API Token. See the docs for information on how to generate this key. 
| +| `authMethod` | [SourceZendeskSunshineCredentialsAPITokenAuthMethod](../../models/shared/SourceZendeskSunshineCredentialsAPITokenAuthMethod.md) | :heavy_check_mark: | N/A | +| `email` | *String* | :heavy_check_mark: | The user email for your Zendesk account | \ No newline at end of file diff --git a/docs/models/shared/SourceZendeskSunshineCredentialsAPITokenAuthMethod.md b/docs/models/shared/SourceZendeskSunshineCredentialsAPITokenAuthMethod.md new file mode 100755 index 000000000..086fa0c86 --- /dev/null +++ b/docs/models/shared/SourceZendeskSunshineCredentialsAPITokenAuthMethod.md @@ -0,0 +1,8 @@ +# SourceZendeskSunshineCredentialsAPITokenAuthMethod + + +## Values + +| Name | Value | +| ----------- | ----------- | +| `API_TOKEN` | api_token | \ No newline at end of file diff --git a/docs/models/shared/SourceZendeskSunshineCredentialsOAuth20.md b/docs/models/shared/SourceZendeskSunshineCredentialsOAuth20.md new file mode 100755 index 000000000..e404e59bb --- /dev/null +++ b/docs/models/shared/SourceZendeskSunshineCredentialsOAuth20.md @@ -0,0 +1,11 @@ +# SourceZendeskSunshineCredentialsOAuth20 + + +## Fields + +| Field | Type | Required | Description | +| ----------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------- | +| `accessToken` | *String* | :heavy_check_mark: | Long-term access Token for making authenticated requests. 
| +| `authMethod` | [SourceZendeskSunshineCredentialsOAuth20AuthMethod](../../models/shared/SourceZendeskSunshineCredentialsOAuth20AuthMethod.md) | :heavy_check_mark: | N/A | +| `clientId` | *String* | :heavy_check_mark: | The Client ID of your OAuth application. | +| `clientSecret` | *String* | :heavy_check_mark: | The Client Secret of your OAuth application. | \ No newline at end of file diff --git a/docs/models/shared/SourceZendeskSunshineCredentialsOAuth20AuthMethod.md b/docs/models/shared/SourceZendeskSunshineCredentialsOAuth20AuthMethod.md new file mode 100755 index 000000000..345cf4252 --- /dev/null +++ b/docs/models/shared/SourceZendeskSunshineCredentialsOAuth20AuthMethod.md @@ -0,0 +1,8 @@ +# SourceZendeskSunshineCredentialsOAuth20AuthMethod + + +## Values + +| Name | Value | +| --------- | --------- | +| `OAUTH20` | oauth2.0 | \ No newline at end of file diff --git a/docs/models/shared/SourceZendeskSupport.md b/docs/models/shared/SourceZendeskSupport.md index 34c668978..2177d2c2c 100755 --- a/docs/models/shared/SourceZendeskSupport.md +++ b/docs/models/shared/SourceZendeskSupport.md @@ -10,5 +10,5 @@ The values required to configure the source. | `credentials` | *Object* | :heavy_minus_sign: | Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users. | | | `ignorePagination` | *Boolean* | :heavy_minus_sign: | Makes each stream read a single page of data. | | | `sourceType` | [SourceZendeskSupportZendeskSupport](../../models/shared/SourceZendeskSupportZendeskSupport.md) | :heavy_check_mark: | N/A | | -| `startDate` | [OffsetDateTime](https://docs.oracle.com/javase/8/docs/api/java/time/OffsetDateTime.html) | :heavy_check_mark: | The UTC date and time from which you'd like to replicate data, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. 
| 2020-10-15T00:00:00Z | +| `startDate` | [OffsetDateTime](https://docs.oracle.com/javase/8/docs/api/java/time/OffsetDateTime.html) | :heavy_minus_sign: | The UTC date and time from which you'd like to replicate data, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. | 2020-10-15T00:00:00Z | | `subdomain` | *String* | :heavy_check_mark: | This is your unique Zendesk subdomain that can be found in your account URL. For example, in https://MY_SUBDOMAIN.zendesk.com/, MY_SUBDOMAIN is the value of your subdomain. | | \ No newline at end of file diff --git a/docs/models/shared/SourcesResponse.md b/docs/models/shared/SourcesResponse.md index 31217d1ba..4cafa83f0 100755 --- a/docs/models/shared/SourcesResponse.md +++ b/docs/models/shared/SourcesResponse.md @@ -1,7 +1,5 @@ # SourcesResponse -Successful operation - ## Fields diff --git a/docs/models/shared/WorkspacesResponse.md b/docs/models/shared/WorkspacesResponse.md index 770f4a844..536b8d1f1 100755 --- a/docs/models/shared/WorkspacesResponse.md +++ b/docs/models/shared/WorkspacesResponse.md @@ -1,7 +1,5 @@ # WorkspacesResponse -Successful operation - ## Fields diff --git a/docs/sdks/airbyte/README.md b/docs/sdks/airbyte/README.md index e2e893aae..ceb3f5e44 100755 --- a/docs/sdks/airbyte/README.md +++ b/docs/sdks/airbyte/README.md @@ -2,5 +2,7 @@ ## Overview +airbyte-api: Programmatically control Airbyte Cloud, OSS & Enterprise. 
+ ### Available Operations diff --git a/docs/sdks/connections/README.md b/docs/sdks/connections/README.md index 1ceb89289..c77986c21 100755 --- a/docs/sdks/connections/README.md +++ b/docs/sdks/connections/README.md @@ -35,124 +35,41 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("aut") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("ipsa", "reiciendis") {{ + password = ""; + username = ""; + }}; }}) .build(); - com.airbyte.api.models.shared.ConnectionCreateRequest req = new ConnectionCreateRequest("19da1ffe-78f0-497b-8074-f15471b5e6e1", "3b99d488-e1e9-41e4-90ad-2abd44269802") {{ + com.airbyte.api.models.shared.ConnectionCreateRequest req = new ConnectionCreateRequest("aaa2352c-5955-4907-aff1-a3a2fa946773", "9251aa52-c3f5-4ad0-99da-1ffe78f097b0") {{ configurations = new StreamConfigurations() {{ streams = new com.airbyte.api.models.shared.StreamConfiguration[]{{ - add(new StreamConfiguration("officia") {{ - cursorField = new String[]{{ - add("alias"), - add("fugit"), - }}; - name = "Marshall Glover"; - primaryKey = new String[][]{{ - add(new String[]{{ - add("eum"), - add("non"), - add("eligendi"), - add("sint"), - }}), - add(new String[]{{ - add("provident"), - add("necessitatibus"), - }}), - }}; - syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND; - }}), - add(new StreamConfiguration("provident") {{ - cursorField = new String[]{{ - add("debitis"), - }}; - name = "Wilbur King"; - primaryKey = new String[][]{{ - add(new String[]{{ - add("dicta"), - add("magnam"), - add("cumque"), - }}), - add(new String[]{{ - add("ea"), - add("aliquid"), - add("laborum"), - add("accusamus"), - }}), - add(new String[]{{ - add("occaecati"), - }}), - add(new String[]{{ - add("accusamus"), - add("delectus"), - }}), - }}; - syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND; - }}), - add(new StreamConfiguration("nobis") {{ - cursorField = new 
String[]{{ - add("id"), - add("blanditiis"), - add("deleniti"), - }}; - name = "Vincent Nolan"; - primaryKey = new String[][]{{ - add(new String[]{{ - add("molestiae"), - add("perferendis"), - add("nihil"), - }}), - add(new String[]{{ - add("distinctio"), - add("id"), - }}), - add(new String[]{{ - add("labore"), - add("suscipit"), - }}), - }}; - syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND; - }}), - add(new StreamConfiguration("debitis") {{ + add(new StreamConfiguration("harum") {{ cursorField = new String[]{{ - add("vero"), - add("aspernatur"), + add("doloremque"), }}; - name = "Danielle Bosco"; + name = "Mrs. April Wuckert"; primaryKey = new String[][]{{ add(new String[]{{ - add("sint"), - add("accusantium"), - add("mollitia"), - }}), - add(new String[]{{ - add("mollitia"), - add("ad"), - add("eum"), - add("dolor"), - }}), - add(new String[]{{ - add("odit"), - add("nemo"), - add("quasi"), - add("iure"), + add("iusto"), }}), }}; - syncMode = ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY; + syncMode = ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE; }}), }}; }};; dataResidency = GeographyEnum.AUTO; - name = "Alfredo Prosacco Sr."; - namespaceDefinition = NamespaceDefinitionEnum.CUSTOM_FORMAT; + name = "Mrs. 
Leslie VonRueden"; + namespaceDefinition = NamespaceDefinitionEnum.DESTINATION; namespaceFormat = "${SOURCE_NAMESPACE}"; - nonBreakingSchemaUpdatesBehavior = NonBreakingSchemaUpdatesBehaviorEnum.DISABLE_CONNECTION; - prefix = "expedita"; + nonBreakingSchemaUpdatesBehavior = NonBreakingSchemaUpdatesBehaviorEnum.PROPAGATE_COLUMNS; + prefix = "pariatur"; schedule = new ConnectionSchedule(ScheduleTypeEnum.MANUAL) {{ - cronExpression = "repellat"; + cronExpression = "praesentium"; }};; - status = ConnectionStatusEnum.DEPRECATED; + status = ConnectionStatusEnum.INACTIVE; }}; CreateConnectionResponse res = sdk.connections.createConnection(req); @@ -197,12 +114,15 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("sed") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("voluptates", "quasi") {{ + password = ""; + username = ""; + }}; }}) .build(); - DeleteConnectionRequest req = new DeleteConnectionRequest("saepe"); + DeleteConnectionRequest req = new DeleteConnectionRequest("repudiandae"); DeleteConnectionResponse res = sdk.connections.deleteConnection(req); @@ -246,12 +166,15 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("pariatur") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("sint", "veritatis") {{ + password = ""; + username = ""; + }}; }}) .build(); - GetConnectionRequest req = new GetConnectionRequest("accusantium"); + GetConnectionRequest req = new GetConnectionRequest("itaque"); GetConnectionResponse res = sdk.connections.getConnection(req); @@ -295,17 +218,20 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("consequuntur") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new 
SchemeBasicAuth("incidunt", "enim") {{ + password = ""; + username = ""; + }}; }}) .build(); ListConnectionsRequest req = new ListConnectionsRequest() {{ includeDeleted = false; - limit = 508315; - offset = 615560; + limit = 9356; + offset = 667411; workspaceIds = new String[]{{ - add("1cddc692-601f-4b57-ab0d-5f0d30c5fbb2"), + add("d2abd442-6980-42d5-82a9-4bb4f63c969e"), }}; }}; @@ -361,75 +287,42 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("quis") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("sint", "officia") {{ + password = ""; + username = ""; + }}; }}) .build(); PatchConnectionRequest req = new PatchConnectionRequest( new ConnectionPatchRequest() {{ configurations = new StreamConfigurations() {{ streams = new com.airbyte.api.models.shared.StreamConfiguration[]{{ - add(new StreamConfiguration("facilis") {{ - cursorField = new String[]{{ - add("eaque"), - add("quis"), - }}; - name = "Ruby Auer"; - primaryKey = new String[][]{{ - add(new String[]{{ - add("vero"), - }}), - add(new String[]{{ - add("hic"), - add("recusandae"), - }}), - }}; - syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND; - }}), - add(new StreamConfiguration("pariatur") {{ - cursorField = new String[]{{ - add("voluptatem"), - add("porro"), - add("consequuntur"), - }}; - name = "Jeremiah Beatty"; - primaryKey = new String[][]{{ - add(new String[]{{ - add("earum"), - add("modi"), - add("iste"), - add("dolorum"), - }}), - }}; - syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND; - }}), - add(new StreamConfiguration("excepturi") {{ + add(new StreamConfiguration("rerum") {{ cursorField = new String[]{{ - add("nobis"), - add("libero"), - add("delectus"), + add("dolor"), }}; - name = "Billie Jacobi"; + name = "Randal Parisian"; primaryKey = new String[][]{{ add(new String[]{{ - add("ipsum"), + add("illum"), }}), }}; syncMode = 
ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY; }}), }}; }};; - dataResidency = GeographyEnumNoDefault.EU; - name = "Marian Wisozk"; - namespaceDefinition = NamespaceDefinitionEnumNoDefault.SOURCE; + dataResidency = GeographyEnumNoDefault.AUTO; + name = "Blanca Schulist"; + namespaceDefinition = NamespaceDefinitionEnumNoDefault.CUSTOM_FORMAT; namespaceFormat = "${SOURCE_NAMESPACE}"; - nonBreakingSchemaUpdatesBehavior = NonBreakingSchemaUpdatesBehaviorEnumNoDefault.IGNORE; - prefix = "ipsa"; - schedule = new ConnectionSchedule(ScheduleTypeEnum.MANUAL) {{ - cronExpression = "iure"; + nonBreakingSchemaUpdatesBehavior = NonBreakingSchemaUpdatesBehaviorEnumNoDefault.PROPAGATE_FULLY; + prefix = "non"; + schedule = new ConnectionSchedule(ScheduleTypeEnum.CRON) {{ + cronExpression = "enim"; }};; - status = ConnectionStatusEnum.INACTIVE; - }};, "quaerat"); + status = ConnectionStatusEnum.DEPRECATED; + }};, "delectus"); PatchConnectionResponse res = sdk.connections.patchConnection(req); diff --git a/docs/sdks/destinations/README.md b/docs/sdks/destinations/README.md index a6d93c115..034eb6b02 100755 --- a/docs/sdks/destinations/README.md +++ b/docs/sdks/destinations/README.md @@ -162,6 +162,24 @@ import com.airbyte.api.models.shared.DestinationLangchainIndexingPinecone; import com.airbyte.api.models.shared.DestinationLangchainIndexingPineconeMode; import com.airbyte.api.models.shared.DestinationLangchainLangchain; import com.airbyte.api.models.shared.DestinationLangchainProcessingConfigModel; +import com.airbyte.api.models.shared.DestinationMilvus; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingCohere; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingCohereMode; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingFake; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingFakeMode; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingFromField; +import 
com.airbyte.api.models.shared.DestinationMilvusEmbeddingFromFieldMode; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingOpenAI; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingOpenAIMode; +import com.airbyte.api.models.shared.DestinationMilvusIndexing; +import com.airbyte.api.models.shared.DestinationMilvusIndexingAuthAPIToken; +import com.airbyte.api.models.shared.DestinationMilvusIndexingAuthAPITokenMode; +import com.airbyte.api.models.shared.DestinationMilvusIndexingAuthNoAuth; +import com.airbyte.api.models.shared.DestinationMilvusIndexingAuthNoAuthMode; +import com.airbyte.api.models.shared.DestinationMilvusIndexingAuthUsernamePassword; +import com.airbyte.api.models.shared.DestinationMilvusIndexingAuthUsernamePasswordMode; +import com.airbyte.api.models.shared.DestinationMilvusMilvus; +import com.airbyte.api.models.shared.DestinationMilvusProcessingConfigModel; import com.airbyte.api.models.shared.DestinationMongodb; import com.airbyte.api.models.shared.DestinationMongodbAuthTypeLoginPassword; import com.airbyte.api.models.shared.DestinationMongodbAuthTypeLoginPasswordAuthorization; @@ -208,6 +226,16 @@ import com.airbyte.api.models.shared.DestinationOracleTunnelMethodPasswordAuthen import com.airbyte.api.models.shared.DestinationOracleTunnelMethodPasswordAuthenticationTunnelMethod; import com.airbyte.api.models.shared.DestinationOracleTunnelMethodSSHKeyAuthentication; import com.airbyte.api.models.shared.DestinationOracleTunnelMethodSSHKeyAuthenticationTunnelMethod; +import com.airbyte.api.models.shared.DestinationPinecone; +import com.airbyte.api.models.shared.DestinationPineconeEmbeddingCohere; +import com.airbyte.api.models.shared.DestinationPineconeEmbeddingCohereMode; +import com.airbyte.api.models.shared.DestinationPineconeEmbeddingFake; +import com.airbyte.api.models.shared.DestinationPineconeEmbeddingFakeMode; +import com.airbyte.api.models.shared.DestinationPineconeEmbeddingOpenAI; +import 
com.airbyte.api.models.shared.DestinationPineconeEmbeddingOpenAIMode; +import com.airbyte.api.models.shared.DestinationPineconeIndexing; +import com.airbyte.api.models.shared.DestinationPineconePinecone; +import com.airbyte.api.models.shared.DestinationPineconeProcessingConfigModel; import com.airbyte.api.models.shared.DestinationPostgres; import com.airbyte.api.models.shared.DestinationPostgresPostgres; import com.airbyte.api.models.shared.DestinationPostgresSslModeAllow; @@ -335,22 +363,25 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("accusamus") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("quidem", "provident") {{ + password = ""; + username = ""; + }}; }}) .build(); - com.airbyte.api.models.shared.DestinationCreateRequest req = new DestinationCreateRequest( new DestinationPubsub(false, "fugiat", DestinationPubsubPubsub.PUBSUB, false, "ab", "soluta") {{ - batchingDelayThreshold = 976405L; - batchingElementCountThreshold = 377752L; + com.airbyte.api.models.shared.DestinationCreateRequest req = new DestinationCreateRequest( new DestinationPubsub(false, "nisi", DestinationPubsubPubsub.PUBSUB, false, "vel", "natus") {{ + batchingDelayThreshold = 659669L; + batchingElementCountThreshold = 501324L; batchingEnabled = false; - batchingRequestBytesThreshold = 617658L; - credentialsJson = "eos"; + batchingRequestBytesThreshold = 533206L; + credentialsJson = "sapiente"; destinationType = DestinationPubsubPubsub.PUBSUB; orderingEnabled = false; - projectId = "atque"; - topicId = "sit"; - }}, "dolorum", "77a89ebf-737a-4e42-83ce-5e6a95d8a0d4"); + projectId = "amet"; + topicId = "deserunt"; + }}, "omnis", "7074ba44-69b6-4e21-8195-9890afa563e2"); CreateDestinationResponse res = sdk.destinations.createDestination(req); @@ -394,12 +425,15 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() 
- .setSecurity(new Security("tempora") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("nemo", "quasi") {{ + password = ""; + username = ""; + }}; }}) .build(); - DeleteDestinationRequest req = new DeleteDestinationRequest("vel"); + DeleteDestinationRequest req = new DeleteDestinationRequest("iure"); DeleteDestinationResponse res = sdk.destinations.deleteDestination(req); @@ -443,12 +477,15 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("quod") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("doloribus", "debitis") {{ + password = ""; + username = ""; + }}; }}) .build(); - GetDestinationRequest req = new GetDestinationRequest("officiis"); + GetDestinationRequest req = new GetDestinationRequest("eius"); GetDestinationResponse res = sdk.destinations.getDestination(req); @@ -492,18 +529,20 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("qui") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("maxime", "deleniti") {{ + password = ""; + username = ""; + }}; }}) .build(); ListDestinationsRequest req = new ListDestinationsRequest() {{ includeDeleted = false; - limit = 679880; - offset = 952792; + limit = 703889; + offset = 447926; workspaceIds = new String[]{{ - add("a73cf3be-453f-4870-b326-b5a73429cdb1"), - add("a8422bb6-79d2-4322-b15b-f0cbb1e31b8b"), + add("11e5b7fd-2ed0-4289-a1cd-dc692601fb57"), }}; }}; @@ -684,6 +723,24 @@ import com.airbyte.api.models.shared.DestinationLangchainIndexingPinecone; import com.airbyte.api.models.shared.DestinationLangchainIndexingPineconeMode; import com.airbyte.api.models.shared.DestinationLangchainLangchain; import com.airbyte.api.models.shared.DestinationLangchainProcessingConfigModel; +import 
com.airbyte.api.models.shared.DestinationMilvus; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingCohere; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingCohereMode; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingFake; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingFakeMode; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingFromField; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingFromFieldMode; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingOpenAI; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingOpenAIMode; +import com.airbyte.api.models.shared.DestinationMilvusIndexing; +import com.airbyte.api.models.shared.DestinationMilvusIndexingAuthAPIToken; +import com.airbyte.api.models.shared.DestinationMilvusIndexingAuthAPITokenMode; +import com.airbyte.api.models.shared.DestinationMilvusIndexingAuthNoAuth; +import com.airbyte.api.models.shared.DestinationMilvusIndexingAuthNoAuthMode; +import com.airbyte.api.models.shared.DestinationMilvusIndexingAuthUsernamePassword; +import com.airbyte.api.models.shared.DestinationMilvusIndexingAuthUsernamePasswordMode; +import com.airbyte.api.models.shared.DestinationMilvusMilvus; +import com.airbyte.api.models.shared.DestinationMilvusProcessingConfigModel; import com.airbyte.api.models.shared.DestinationMongodb; import com.airbyte.api.models.shared.DestinationMongodbAuthTypeLoginPassword; import com.airbyte.api.models.shared.DestinationMongodbAuthTypeLoginPasswordAuthorization; @@ -731,6 +788,16 @@ import com.airbyte.api.models.shared.DestinationOracleTunnelMethodPasswordAuthen import com.airbyte.api.models.shared.DestinationOracleTunnelMethodSSHKeyAuthentication; import com.airbyte.api.models.shared.DestinationOracleTunnelMethodSSHKeyAuthenticationTunnelMethod; import com.airbyte.api.models.shared.DestinationPatchRequest; +import com.airbyte.api.models.shared.DestinationPinecone; +import 
com.airbyte.api.models.shared.DestinationPineconeEmbeddingCohere; +import com.airbyte.api.models.shared.DestinationPineconeEmbeddingCohereMode; +import com.airbyte.api.models.shared.DestinationPineconeEmbeddingFake; +import com.airbyte.api.models.shared.DestinationPineconeEmbeddingFakeMode; +import com.airbyte.api.models.shared.DestinationPineconeEmbeddingOpenAI; +import com.airbyte.api.models.shared.DestinationPineconeEmbeddingOpenAIMode; +import com.airbyte.api.models.shared.DestinationPineconeIndexing; +import com.airbyte.api.models.shared.DestinationPineconePinecone; +import com.airbyte.api.models.shared.DestinationPineconeProcessingConfigModel; import com.airbyte.api.models.shared.DestinationPostgres; import com.airbyte.api.models.shared.DestinationPostgresPostgres; import com.airbyte.api.models.shared.DestinationPostgresSslModeAllow; @@ -858,19 +925,24 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("cupiditate") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("autem", "nam") {{ + password = ""; + username = ""; + }}; }}) .build(); - PatchDestinationRequest req = new PatchDestinationRequest("aperiam") {{ + PatchDestinationRequest req = new PatchDestinationRequest("eaque") {{ destinationPatchRequest = new DestinationPatchRequest() {{ - configuration = new DestinationVertica("dolorem", DestinationVerticaVertica.VERTICA, "dolore", 5433L, "labore", "adipisci") {{ - jdbcUrlParams = "dolorum"; - password = "architecto"; - tunnelMethod = new DestinationVerticaTunnelMethodNoTunnel(DestinationVerticaTunnelMethodNoTunnelTunnelMethod.NO_TUNNEL);; + configuration = new DestinationSnowflake("AIRBYTE_DATABASE", DestinationSnowflakeSnowflake.SNOWFLAKE, "accountname.us-east-2.aws.snowflakecomputing.com", "AIRBYTE_ROLE", "AIRBYTE_SCHEMA", "AIRBYTE_USER", "AIRBYTE_WAREHOUSE") {{ + credentials = new 
DestinationSnowflakeCredentialsUsernameAndPassword("perferendis") {{ + authType = DestinationSnowflakeCredentialsUsernameAndPasswordAuthType.USERNAME_AND_PASSWORD; + }};; + jdbcUrlParams = "fugiat"; + rawDataSchema = "amet"; }};; - name = "Miss Billie Ward"; + name = "Erma Hessel"; }};; }}; @@ -1051,6 +1123,24 @@ import com.airbyte.api.models.shared.DestinationLangchainIndexingPinecone; import com.airbyte.api.models.shared.DestinationLangchainIndexingPineconeMode; import com.airbyte.api.models.shared.DestinationLangchainLangchain; import com.airbyte.api.models.shared.DestinationLangchainProcessingConfigModel; +import com.airbyte.api.models.shared.DestinationMilvus; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingCohere; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingCohereMode; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingFake; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingFakeMode; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingFromField; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingFromFieldMode; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingOpenAI; +import com.airbyte.api.models.shared.DestinationMilvusEmbeddingOpenAIMode; +import com.airbyte.api.models.shared.DestinationMilvusIndexing; +import com.airbyte.api.models.shared.DestinationMilvusIndexingAuthAPIToken; +import com.airbyte.api.models.shared.DestinationMilvusIndexingAuthAPITokenMode; +import com.airbyte.api.models.shared.DestinationMilvusIndexingAuthNoAuth; +import com.airbyte.api.models.shared.DestinationMilvusIndexingAuthNoAuthMode; +import com.airbyte.api.models.shared.DestinationMilvusIndexingAuthUsernamePassword; +import com.airbyte.api.models.shared.DestinationMilvusIndexingAuthUsernamePasswordMode; +import com.airbyte.api.models.shared.DestinationMilvusMilvus; +import com.airbyte.api.models.shared.DestinationMilvusProcessingConfigModel; import 
com.airbyte.api.models.shared.DestinationMongodb; import com.airbyte.api.models.shared.DestinationMongodbAuthTypeLoginPassword; import com.airbyte.api.models.shared.DestinationMongodbAuthTypeLoginPasswordAuthorization; @@ -1097,6 +1187,16 @@ import com.airbyte.api.models.shared.DestinationOracleTunnelMethodPasswordAuthen import com.airbyte.api.models.shared.DestinationOracleTunnelMethodPasswordAuthenticationTunnelMethod; import com.airbyte.api.models.shared.DestinationOracleTunnelMethodSSHKeyAuthentication; import com.airbyte.api.models.shared.DestinationOracleTunnelMethodSSHKeyAuthenticationTunnelMethod; +import com.airbyte.api.models.shared.DestinationPinecone; +import com.airbyte.api.models.shared.DestinationPineconeEmbeddingCohere; +import com.airbyte.api.models.shared.DestinationPineconeEmbeddingCohereMode; +import com.airbyte.api.models.shared.DestinationPineconeEmbeddingFake; +import com.airbyte.api.models.shared.DestinationPineconeEmbeddingFakeMode; +import com.airbyte.api.models.shared.DestinationPineconeEmbeddingOpenAI; +import com.airbyte.api.models.shared.DestinationPineconeEmbeddingOpenAIMode; +import com.airbyte.api.models.shared.DestinationPineconeIndexing; +import com.airbyte.api.models.shared.DestinationPineconePinecone; +import com.airbyte.api.models.shared.DestinationPineconeProcessingConfigModel; import com.airbyte.api.models.shared.DestinationPostgres; import com.airbyte.api.models.shared.DestinationPostgresPostgres; import com.airbyte.api.models.shared.DestinationPostgresSslModeAllow; @@ -1225,20 +1325,65 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("porro") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("nobis", "dolores") {{ + password = ""; + username = ""; + }}; }}) .build(); - PutDestinationRequest req = new PutDestinationRequest("doloribus") {{ - destinationPutRequest = new DestinationPutRequest( new 
DestinationDevNull(DestinationDevNullDevNull.DEV_NULL, new DestinationDevNullTestDestinationSilent(DestinationDevNullTestDestinationSilentTestDestinationType.SILENT) {{ - testDestinationType = DestinationDevNullTestDestinationSilentTestDestinationType.SILENT; - }}) {{ - destinationType = DestinationDevNullDevNull.DEV_NULL; - testDestination = new DestinationDevNullTestDestinationSilent(DestinationDevNullTestDestinationSilentTestDestinationType.SILENT) {{ - testDestinationType = DestinationDevNullTestDestinationSilentTestDestinationType.SILENT; + PutDestinationRequest req = new PutDestinationRequest("quis") {{ + destinationPutRequest = new DestinationPutRequest( new DestinationMilvus(DestinationMilvusMilvus.MILVUS, new DestinationMilvusEmbeddingFromField(384L, "vector") {{ + dimensions = 1536L; + fieldName = "vector"; + mode = DestinationMilvusEmbeddingFromFieldMode.FROM_FIELD; + }}, new DestinationMilvusIndexing( new DestinationMilvusIndexingAuthNoAuth() {{ + mode = DestinationMilvusIndexingAuthNoAuthMode.NO_AUTH; + }}, "provident", "tcp://my-local-milvus:19530") {{ + db = "libero"; + textField = "delectus"; + vectorField = "quaerat"; + }};, new DestinationMilvusProcessingConfigModel(554242L) {{ + chunkOverlap = 398221L; + metadataFields = new String[]{{ + add("dolorem"), + }}; + textFields = new String[]{{ + add("dolorem"), + }}; + }};) {{ + destinationType = DestinationMilvusMilvus.MILVUS; + embedding = new DestinationMilvusEmbeddingCohere("quis") {{ + cohereKey = "eaque"; + mode = DestinationMilvusEmbeddingCohereMode.COHERE; }}; - }}, "facilis");; + indexing = new DestinationMilvusIndexing( new DestinationMilvusIndexingAuthUsernamePassword("porro", "consequuntur") {{ + mode = DestinationMilvusIndexingAuthUsernamePasswordMode.USERNAME_PASSWORD; + password = "hic"; + username = "Timothy_Mohr3"; + }}, "blanditiis", "tcp://host.docker.internal:19530") {{ + auth = new DestinationMilvusIndexingAuthAPIToken("perferendis") {{ + mode = 
DestinationMilvusIndexingAuthAPITokenMode.TOKEN; + token = "eos"; + }}; + collection = "dolores"; + db = "minus"; + host = "tcp://host.docker.internal:19530"; + textField = "dolor"; + vectorField = "vero"; + }}; + processing = new DestinationMilvusProcessingConfigModel(992397L) {{ + chunkOverlap = 50370L; + chunkSize = 577229L; + metadataFields = new String[]{{ + add("rerum"), + }}; + textFields = new String[]{{ + add("adipisci"), + }}; + }}; + }}, "dolor");; }}; PutDestinationResponse res = sdk.destinations.putDestination(req); diff --git a/docs/sdks/jobs/README.md b/docs/sdks/jobs/README.md index b318a0879..57fddf6c6 100755 --- a/docs/sdks/jobs/README.md +++ b/docs/sdks/jobs/README.md @@ -25,12 +25,15 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("cupiditate") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("qui", "ipsum") {{ + password = ""; + username = ""; + }}; }}) .build(); - CancelJobRequest req = new CancelJobRequest(181631L); + CancelJobRequest req = new CancelJobRequest(944373L); CancelJobResponse res = sdk.jobs.cancelJob(req); @@ -75,12 +78,15 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("quae") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("excepturi", "cum") {{ + password = ""; + username = ""; + }}; }}) .build(); - com.airbyte.api.models.shared.JobCreateRequest req = new JobCreateRequest("laudantium", JobTypeEnum.SYNC); + com.airbyte.api.models.shared.JobCreateRequest req = new JobCreateRequest("voluptate", JobTypeEnum.SYNC); CreateJobResponse res = sdk.jobs.createJob(req); @@ -124,12 +130,15 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("occaecati") {{ - bearerAuth = ""; + .setSecurity(new 
Security() {{ + basicAuth = new SchemeBasicAuth("reiciendis", "amet") {{ + password = ""; + username = ""; + }}; }}) .build(); - GetJobRequest req = new GetJobRequest(977496L); + GetJobRequest req = new GetJobRequest(680545L); GetJobResponse res = sdk.jobs.getJob(req); @@ -176,26 +185,27 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("quisquam") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("numquam", "veritatis") {{ + password = ""; + username = ""; + }}; }}) .build(); ListJobsRequest req = new ListJobsRequest() {{ - connectionId = "vero"; - createdAtEnd = OffsetDateTime.parse("2022-04-29T03:27:17.664Z"); - createdAtStart = OffsetDateTime.parse("2022-01-15T00:38:43.469Z"); - jobType = JobTypeEnum.SYNC; - limit = 231701; - offset = 878870; - orderBy = "tenetur"; - status = JobStatusEnum.INCOMPLETE; - updatedAtEnd = OffsetDateTime.parse("2020-11-08T11:03:10.206Z"); - updatedAtStart = OffsetDateTime.parse("2021-07-17T15:43:35.984Z"); + connectionId = "ipsa"; + createdAtEnd = OffsetDateTime.parse("2022-07-26T10:30:36.625Z"); + createdAtStart = OffsetDateTime.parse("2022-09-09T04:40:04.540Z"); + jobType = JobTypeEnum.RESET; + limit = 696344; + offset = 976405; + orderBy = "voluptas"; + status = JobStatusEnum.FAILED; + updatedAtEnd = OffsetDateTime.parse("2022-06-16T23:42:38.113Z"); + updatedAtStart = OffsetDateTime.parse("2022-02-23T01:35:05.899Z"); workspaceIds = new String[]{{ - add("bd74dd39-c0f5-4d2c-bf7c-70a45626d436"), - add("813f16d9-f5fc-4e6c-9561-46c3e250fb00"), - add("8c42e141-aac3-466c-8dd6-b14429074747"), + add("1ba77a89-ebf7-437a-a420-3ce5e6a95d8a"), }}; }}; diff --git a/docs/sdks/sources/README.md b/docs/sdks/sources/README.md index 7c84ab904..9a8a0fe15 100755 --- a/docs/sdks/sources/README.md +++ b/docs/sdks/sources/README.md @@ -134,11 +134,6 @@ import com.airbyte.api.models.shared.SourceConfluenceConfluence; import 
com.airbyte.api.models.shared.SourceConvex; import com.airbyte.api.models.shared.SourceConvexConvex; import com.airbyte.api.models.shared.SourceCreateRequest; -import com.airbyte.api.models.shared.SourceDatadog; -import com.airbyte.api.models.shared.SourceDatadogDatadog; -import com.airbyte.api.models.shared.SourceDatadogQueries; -import com.airbyte.api.models.shared.SourceDatadogQueriesDataSource; -import com.airbyte.api.models.shared.SourceDatadogSite; import com.airbyte.api.models.shared.SourceDatascope; import com.airbyte.api.models.shared.SourceDatascopeDatascope; import com.airbyte.api.models.shared.SourceDelighted; @@ -266,7 +261,9 @@ import com.airbyte.api.models.shared.SourceGoogleSearchConsoleAuthorizationOAuth import com.airbyte.api.models.shared.SourceGoogleSearchConsoleAuthorizationOAuthAuthType; import com.airbyte.api.models.shared.SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthentication; import com.airbyte.api.models.shared.SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthenticationAuthType; -import com.airbyte.api.models.shared.SourceGoogleSearchConsoleDataState; +import com.airbyte.api.models.shared.SourceGoogleSearchConsoleCustomReportConfig; +import com.airbyte.api.models.shared.SourceGoogleSearchConsoleCustomReportConfigValidEnums; +import com.airbyte.api.models.shared.SourceGoogleSearchConsoleDataFreshness; import com.airbyte.api.models.shared.SourceGoogleSearchConsoleGoogleSearchConsole; import com.airbyte.api.models.shared.SourceGoogleSheets; import com.airbyte.api.models.shared.SourceGoogleSheetsCredentialsAuthenticateViaGoogleOAuth; @@ -330,7 +327,7 @@ import com.airbyte.api.models.shared.SourceLeverHiringEnvironment; import com.airbyte.api.models.shared.SourceLeverHiringLeverHiring; import com.airbyte.api.models.shared.SourceLinkedinAds; import com.airbyte.api.models.shared.SourceLinkedinAdsAdAnalyticsReportConfiguration; -import com.airbyte.api.models.shared.SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy; 
+import com.airbyte.api.models.shared.SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory; import com.airbyte.api.models.shared.SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity; import com.airbyte.api.models.shared.SourceLinkedinAdsCredentialsAccessToken; import com.airbyte.api.models.shared.SourceLinkedinAdsCredentialsAccessTokenAuthMethod; @@ -390,12 +387,12 @@ import com.airbyte.api.models.shared.SourceMongodbInternalPocMongodbInternalPoc; import com.airbyte.api.models.shared.SourceMongodbMongodb; import com.airbyte.api.models.shared.SourceMssql; import com.airbyte.api.models.shared.SourceMssqlMssql; -import com.airbyte.api.models.shared.SourceMssqlReplicationMethodLogicalReplicationCDC; -import com.airbyte.api.models.shared.SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync; -import com.airbyte.api.models.shared.SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel; -import com.airbyte.api.models.shared.SourceMssqlReplicationMethodLogicalReplicationCDCMethod; -import com.airbyte.api.models.shared.SourceMssqlReplicationMethodStandard; -import com.airbyte.api.models.shared.SourceMssqlReplicationMethodStandardMethod; +import com.airbyte.api.models.shared.SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC; +import com.airbyte.api.models.shared.SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync; +import com.airbyte.api.models.shared.SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel; +import com.airbyte.api.models.shared.SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod; +import com.airbyte.api.models.shared.SourceMssqlReplicationMethodScanChangesWithUserDefinedCursor; +import com.airbyte.api.models.shared.SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod; import com.airbyte.api.models.shared.SourceMssqlSslMethodEncryptedTrustServerCertificate; import 
com.airbyte.api.models.shared.SourceMssqlSslMethodEncryptedTrustServerCertificateSslMethod; import com.airbyte.api.models.shared.SourceMssqlSslMethodEncryptedVerifyCertificate; @@ -451,10 +448,6 @@ import com.airbyte.api.models.shared.SourceOmnisendOmnisend; import com.airbyte.api.models.shared.SourceOnesignal; import com.airbyte.api.models.shared.SourceOnesignalApplications; import com.airbyte.api.models.shared.SourceOnesignalOnesignal; -import com.airbyte.api.models.shared.SourceOpenweather; -import com.airbyte.api.models.shared.SourceOpenweatherLanguage; -import com.airbyte.api.models.shared.SourceOpenweatherOpenweather; -import com.airbyte.api.models.shared.SourceOpenweatherUnits; import com.airbyte.api.models.shared.SourceOracle; import com.airbyte.api.models.shared.SourceOracleConnectionDataServiceName; import com.airbyte.api.models.shared.SourceOracleConnectionDataServiceNameConnectionType; @@ -519,10 +512,10 @@ import com.airbyte.api.models.shared.SourcePolygonStockApi; import com.airbyte.api.models.shared.SourcePolygonStockApiPolygonStockApi; import com.airbyte.api.models.shared.SourcePostgres; import com.airbyte.api.models.shared.SourcePostgresPostgres; -import com.airbyte.api.models.shared.SourcePostgresReplicationMethodStandard; -import com.airbyte.api.models.shared.SourcePostgresReplicationMethodStandardMethod; -import com.airbyte.api.models.shared.SourcePostgresReplicationMethodStandardXmin; -import com.airbyte.api.models.shared.SourcePostgresReplicationMethodStandardXminMethod; +import com.airbyte.api.models.shared.SourcePostgresReplicationMethodDetectChangesWithXminSystemColumn; +import com.airbyte.api.models.shared.SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod; +import com.airbyte.api.models.shared.SourcePostgresReplicationMethodScanChangesWithUserDefinedCursor; +import com.airbyte.api.models.shared.SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod; import 
com.airbyte.api.models.shared.SourcePostgresTunnelMethodNoTunnel; import com.airbyte.api.models.shared.SourcePostgresTunnelMethodNoTunnelTunnelMethod; import com.airbyte.api.models.shared.SourcePostgresTunnelMethodPasswordAuthentication; @@ -535,8 +528,6 @@ import com.airbyte.api.models.shared.SourcePostmarkapp; import com.airbyte.api.models.shared.SourcePostmarkappPostmarkapp; import com.airbyte.api.models.shared.SourcePrestashop; import com.airbyte.api.models.shared.SourcePrestashopPrestashop; -import com.airbyte.api.models.shared.SourcePublicApis; -import com.airbyte.api.models.shared.SourcePublicApisPublicApis; import com.airbyte.api.models.shared.SourcePunkApi; import com.airbyte.api.models.shared.SourcePunkApiPunkApi; import com.airbyte.api.models.shared.SourcePypi; @@ -566,6 +557,23 @@ import com.airbyte.api.models.shared.SourceRkiCovidRkiCovid; import com.airbyte.api.models.shared.SourceRss; import com.airbyte.api.models.shared.SourceRssRss; import com.airbyte.api.models.shared.SourceS3; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfig; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatAvroFormat; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatAvroFormatFiletype; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormat; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatFiletype; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogenerated; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSV; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType; +import 
com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvided; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatJsonlFormat; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatParquetFormat; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatParquetFormatFiletype; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigValidationPolicy; import com.airbyte.api.models.shared.SourceS3FormatAvro; import com.airbyte.api.models.shared.SourceS3FormatAvroFiletype; import com.airbyte.api.models.shared.SourceS3FormatCSV; @@ -734,6 +742,10 @@ import com.airbyte.api.models.shared.SourceZendeskChatCredentialsOAuth20; import com.airbyte.api.models.shared.SourceZendeskChatCredentialsOAuth20Credentials; import com.airbyte.api.models.shared.SourceZendeskChatZendeskChat; import com.airbyte.api.models.shared.SourceZendeskSunshine; +import com.airbyte.api.models.shared.SourceZendeskSunshineCredentialsAPIToken; +import com.airbyte.api.models.shared.SourceZendeskSunshineCredentialsAPITokenAuthMethod; +import com.airbyte.api.models.shared.SourceZendeskSunshineCredentialsOAuth20; +import com.airbyte.api.models.shared.SourceZendeskSunshineCredentialsOAuth20AuthMethod; import com.airbyte.api.models.shared.SourceZendeskSunshineZendeskSunshine; import com.airbyte.api.models.shared.SourceZendeskSupport; import com.airbyte.api.models.shared.SourceZendeskSupportZendeskSupport; @@ -759,44 +771,24 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("esse") {{ - bearerAuth = ""; + 
.setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("alias", "at") {{ + password = ""; + username = ""; + }}; }}) .build(); - com.airbyte.api.models.shared.SourceCreateRequest req = new SourceCreateRequest( new SourceMysql("cupiditate", "consequatur", 3306L, new SourceMysqlReplicationMethodReadChangesUsingBinaryLogCDC(SourceMysqlReplicationMethodReadChangesUsingBinaryLogCDCMethod.CDC) {{ - initialWaitingSeconds = 892050L; - method = SourceMysqlReplicationMethodReadChangesUsingBinaryLogCDCMethod.CDC; - serverTimeZone = "ipsam"; - }}, SourceMysqlMysql.MYSQL, "aspernatur") {{ - database = "fuga"; - host = "reprehenderit"; - jdbcUrlParams = "quidem"; - password = "fugiat"; - port = 3306L; - replicationMethod = new SourceMysqlReplicationMethodReadChangesUsingBinaryLogCDC(SourceMysqlReplicationMethodReadChangesUsingBinaryLogCDCMethod.CDC) {{ - initialWaitingSeconds = 433439L; - method = SourceMysqlReplicationMethodReadChangesUsingBinaryLogCDCMethod.CDC; - serverTimeZone = "suscipit"; + com.airbyte.api.models.shared.SourceCreateRequest req = new SourceCreateRequest( new SourceGoogleDirectory(SourceGoogleDirectoryGoogleDirectory.GOOGLE_DIRECTORY) {{ + credentials = new SourceGoogleDirectoryCredentialsSignInViaGoogleOAuth("qui", "dolorum", "a") {{ + clientId = "vel"; + clientSecret = "quod"; + credentialsTitle = SourceGoogleDirectoryCredentialsSignInViaGoogleOAuthCredentialsTitle.WEB_SERVER_APP; + refreshToken = "officiis"; }}; - sourceType = SourceMysqlMysql.MYSQL; - sslMode = new SourceMysqlSslModeVerifyIdentity("ipsa", SourceMysqlSslModeVerifyIdentityMode.VERIFY_IDENTITY) {{ - caCertificate = "eos"; - clientCertificate = "praesentium"; - clientKey = "quisquam"; - clientKeyPassword = "veritatis"; - mode = SourceMysqlSslModeVerifyIdentityMode.VERIFY_IDENTITY; - }}; - tunnelMethod = new SourceMysqlTunnelMethodSSHKeyAuthentication("illum", "quo", SourceMysqlTunnelMethodSSHKeyAuthenticationTunnelMethod.SSH_KEY_AUTH, 22L, "fuga") {{ - sshKey = "quidem"; - 
tunnelHost = "neque"; - tunnelMethod = SourceMysqlTunnelMethodSSHKeyAuthenticationTunnelMethod.SSH_KEY_AUTH; - tunnelPort = 22L; - tunnelUser = "quo"; - }}; - username = "Destini.Daugherty"; - }}, "sequi", "c7e0bc71-78e4-4796-b2a7-0c688282aa48") {{ - secretId = "explicabo"; + sourceType = SourceGoogleDirectoryGoogleDirectory.GOOGLE_DIRECTORY; + }}, "esse", "a73cf3be-453f-4870-b326-b5a73429cdb1") {{ + secretId = "laborum"; }}; CreateSourceResponse res = sdk.sources.createSource(req); @@ -841,12 +833,15 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("minima") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("totam", "incidunt") {{ + password = ""; + username = ""; + }}; }}) .build(); - DeleteSourceRequest req = new DeleteSourceRequest("nisi"); + DeleteSourceRequest req = new DeleteSourceRequest("aspernatur"); DeleteSourceResponse res = sdk.sources.deleteSource(req); @@ -890,12 +885,15 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("fugit") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("dolores", "distinctio") {{ + password = ""; + username = ""; + }}; }}) .build(); - GetSourceRequest req = new GetSourceRequest("sapiente"); + GetSourceRequest req = new GetSourceRequest("facilis"); GetSourceResponse res = sdk.sources.getSource(req); @@ -945,12 +943,15 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("consequuntur") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("aliquid", "quam") {{ + password = ""; + username = ""; + }}; }}) .build(); - com.airbyte.api.models.shared.InitiateOauthRequest req = new InitiateOauthRequest("ratione", OAuthActorNames.FACEBOOK_MARKETING, 
"e9817ee1-7cbe-461e-ab7b-95bc0ab3c20c") {{ + com.airbyte.api.models.shared.InitiateOauthRequest req = new InitiateOauthRequest("molestias", OAuthActorNames.TRELLO, "2322715b-f0cb-4b1e-b1b8-b90f3443a110") {{ oAuthInputConfiguration = new OAuthInputConfiguration();; }}; @@ -996,18 +997,20 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("quaerat") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("quas", "itaque") {{ + password = ""; + username = ""; + }}; }}) .build(); ListSourcesRequest req = new ListSourcesRequest() {{ includeDeleted = false; - limit = 959167; - offset = 232865; + limit = 9240; + offset = 669917; workspaceIds = new String[]{{ - add("89fd871f-99dd-42ef-9121-aa6f1e674bdb"), - add("04f15756-082d-468e-a19f-1d17051339d0"), + add("dcf4b921-879f-4ce9-93f7-3ef7fbc7abd7"), }}; }}; @@ -1159,11 +1162,6 @@ import com.airbyte.api.models.shared.SourceConfluence; import com.airbyte.api.models.shared.SourceConfluenceConfluence; import com.airbyte.api.models.shared.SourceConvex; import com.airbyte.api.models.shared.SourceConvexConvex; -import com.airbyte.api.models.shared.SourceDatadog; -import com.airbyte.api.models.shared.SourceDatadogDatadog; -import com.airbyte.api.models.shared.SourceDatadogQueries; -import com.airbyte.api.models.shared.SourceDatadogQueriesDataSource; -import com.airbyte.api.models.shared.SourceDatadogSite; import com.airbyte.api.models.shared.SourceDatascope; import com.airbyte.api.models.shared.SourceDatascopeDatascope; import com.airbyte.api.models.shared.SourceDelighted; @@ -1291,7 +1289,9 @@ import com.airbyte.api.models.shared.SourceGoogleSearchConsoleAuthorizationOAuth import com.airbyte.api.models.shared.SourceGoogleSearchConsoleAuthorizationOAuthAuthType; import com.airbyte.api.models.shared.SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthentication; import 
com.airbyte.api.models.shared.SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthenticationAuthType; -import com.airbyte.api.models.shared.SourceGoogleSearchConsoleDataState; +import com.airbyte.api.models.shared.SourceGoogleSearchConsoleCustomReportConfig; +import com.airbyte.api.models.shared.SourceGoogleSearchConsoleCustomReportConfigValidEnums; +import com.airbyte.api.models.shared.SourceGoogleSearchConsoleDataFreshness; import com.airbyte.api.models.shared.SourceGoogleSearchConsoleGoogleSearchConsole; import com.airbyte.api.models.shared.SourceGoogleSheets; import com.airbyte.api.models.shared.SourceGoogleSheetsCredentialsAuthenticateViaGoogleOAuth; @@ -1355,7 +1355,7 @@ import com.airbyte.api.models.shared.SourceLeverHiringEnvironment; import com.airbyte.api.models.shared.SourceLeverHiringLeverHiring; import com.airbyte.api.models.shared.SourceLinkedinAds; import com.airbyte.api.models.shared.SourceLinkedinAdsAdAnalyticsReportConfiguration; -import com.airbyte.api.models.shared.SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy; +import com.airbyte.api.models.shared.SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory; import com.airbyte.api.models.shared.SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity; import com.airbyte.api.models.shared.SourceLinkedinAdsCredentialsAccessToken; import com.airbyte.api.models.shared.SourceLinkedinAdsCredentialsAccessTokenAuthMethod; @@ -1415,12 +1415,12 @@ import com.airbyte.api.models.shared.SourceMongodbInternalPocMongodbInternalPoc; import com.airbyte.api.models.shared.SourceMongodbMongodb; import com.airbyte.api.models.shared.SourceMssql; import com.airbyte.api.models.shared.SourceMssqlMssql; -import com.airbyte.api.models.shared.SourceMssqlReplicationMethodLogicalReplicationCDC; -import com.airbyte.api.models.shared.SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync; -import 
com.airbyte.api.models.shared.SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel; -import com.airbyte.api.models.shared.SourceMssqlReplicationMethodLogicalReplicationCDCMethod; -import com.airbyte.api.models.shared.SourceMssqlReplicationMethodStandard; -import com.airbyte.api.models.shared.SourceMssqlReplicationMethodStandardMethod; +import com.airbyte.api.models.shared.SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC; +import com.airbyte.api.models.shared.SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync; +import com.airbyte.api.models.shared.SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel; +import com.airbyte.api.models.shared.SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod; +import com.airbyte.api.models.shared.SourceMssqlReplicationMethodScanChangesWithUserDefinedCursor; +import com.airbyte.api.models.shared.SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod; import com.airbyte.api.models.shared.SourceMssqlSslMethodEncryptedTrustServerCertificate; import com.airbyte.api.models.shared.SourceMssqlSslMethodEncryptedTrustServerCertificateSslMethod; import com.airbyte.api.models.shared.SourceMssqlSslMethodEncryptedVerifyCertificate; @@ -1476,10 +1476,6 @@ import com.airbyte.api.models.shared.SourceOmnisendOmnisend; import com.airbyte.api.models.shared.SourceOnesignal; import com.airbyte.api.models.shared.SourceOnesignalApplications; import com.airbyte.api.models.shared.SourceOnesignalOnesignal; -import com.airbyte.api.models.shared.SourceOpenweather; -import com.airbyte.api.models.shared.SourceOpenweatherLanguage; -import com.airbyte.api.models.shared.SourceOpenweatherOpenweather; -import com.airbyte.api.models.shared.SourceOpenweatherUnits; import com.airbyte.api.models.shared.SourceOracle; import com.airbyte.api.models.shared.SourceOracleConnectionDataServiceName; import 
com.airbyte.api.models.shared.SourceOracleConnectionDataServiceNameConnectionType; @@ -1545,10 +1541,10 @@ import com.airbyte.api.models.shared.SourcePolygonStockApi; import com.airbyte.api.models.shared.SourcePolygonStockApiPolygonStockApi; import com.airbyte.api.models.shared.SourcePostgres; import com.airbyte.api.models.shared.SourcePostgresPostgres; -import com.airbyte.api.models.shared.SourcePostgresReplicationMethodStandard; -import com.airbyte.api.models.shared.SourcePostgresReplicationMethodStandardMethod; -import com.airbyte.api.models.shared.SourcePostgresReplicationMethodStandardXmin; -import com.airbyte.api.models.shared.SourcePostgresReplicationMethodStandardXminMethod; +import com.airbyte.api.models.shared.SourcePostgresReplicationMethodDetectChangesWithXminSystemColumn; +import com.airbyte.api.models.shared.SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod; +import com.airbyte.api.models.shared.SourcePostgresReplicationMethodScanChangesWithUserDefinedCursor; +import com.airbyte.api.models.shared.SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod; import com.airbyte.api.models.shared.SourcePostgresTunnelMethodNoTunnel; import com.airbyte.api.models.shared.SourcePostgresTunnelMethodNoTunnelTunnelMethod; import com.airbyte.api.models.shared.SourcePostgresTunnelMethodPasswordAuthentication; @@ -1561,8 +1557,6 @@ import com.airbyte.api.models.shared.SourcePostmarkapp; import com.airbyte.api.models.shared.SourcePostmarkappPostmarkapp; import com.airbyte.api.models.shared.SourcePrestashop; import com.airbyte.api.models.shared.SourcePrestashopPrestashop; -import com.airbyte.api.models.shared.SourcePublicApis; -import com.airbyte.api.models.shared.SourcePublicApisPublicApis; import com.airbyte.api.models.shared.SourcePunkApi; import com.airbyte.api.models.shared.SourcePunkApiPunkApi; import com.airbyte.api.models.shared.SourcePypi; @@ -1592,6 +1586,23 @@ import com.airbyte.api.models.shared.SourceRkiCovidRkiCovid; 
import com.airbyte.api.models.shared.SourceRss; import com.airbyte.api.models.shared.SourceRssRss; import com.airbyte.api.models.shared.SourceS3; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfig; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatAvroFormat; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatAvroFormatFiletype; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormat; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatFiletype; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogenerated; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSV; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvided; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatJsonlFormat; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatParquetFormat; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatParquetFormatFiletype; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigValidationPolicy; import com.airbyte.api.models.shared.SourceS3FormatAvro; import com.airbyte.api.models.shared.SourceS3FormatAvroFiletype; import 
com.airbyte.api.models.shared.SourceS3FormatCSV; @@ -1760,6 +1771,10 @@ import com.airbyte.api.models.shared.SourceZendeskChatCredentialsOAuth20; import com.airbyte.api.models.shared.SourceZendeskChatCredentialsOAuth20Credentials; import com.airbyte.api.models.shared.SourceZendeskChatZendeskChat; import com.airbyte.api.models.shared.SourceZendeskSunshine; +import com.airbyte.api.models.shared.SourceZendeskSunshineCredentialsAPIToken; +import com.airbyte.api.models.shared.SourceZendeskSunshineCredentialsAPITokenAuthMethod; +import com.airbyte.api.models.shared.SourceZendeskSunshineCredentialsOAuth20; +import com.airbyte.api.models.shared.SourceZendeskSunshineCredentialsOAuth20AuthMethod; import com.airbyte.api.models.shared.SourceZendeskSunshineZendeskSunshine; import com.airbyte.api.models.shared.SourceZendeskSupport; import com.airbyte.api.models.shared.SourceZendeskSupportZendeskSupport; @@ -1785,28 +1800,20 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("rem") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("dolore", "quibusdam") {{ + password = ""; + username = ""; + }}; }}) .build(); - PatchSourceRequest req = new PatchSourceRequest("aut") {{ + PatchSourceRequest req = new PatchSourceRequest("illum") {{ sourcePatchRequest = new SourcePatchRequest() {{ - configuration = new SourceMssql("master", "eum", 1433L, SourceMssqlMssql.MSSQL, "mollitia") {{ - jdbcUrlParams = "ab"; - password = "corrupti"; - replicationMethod = new SourceMssqlReplicationMethodStandard(SourceMssqlReplicationMethodStandardMethod.STANDARD);; - schemas = new String[]{{ - add("dolor"), - }}; - sslMethod = new SourceMssqlSslMethodEncryptedVerifyCertificate(SourceMssqlSslMethodEncryptedVerifyCertificateSslMethod.ENCRYPTED_VERIFY_CERTIFICATE) {{ - hostNameInCertificate = "numquam"; - }};; - tunnelMethod = new SourceMssqlTunnelMethodPasswordAuthentication("explicabo", 
SourceMssqlTunnelMethodPasswordAuthenticationTunnelMethod.SSH_PASSWORD_AUTH, 22L, "voluptas", "aut");; - }};; + configuration = new SourceDremio("natus", "impedit", SourceDremioDremio.DREMIO);; name = "My source"; - secretId = "dignissimos"; - workspaceId = "1f93f5f0-642d-4ac7-af51-5cc413aa63aa"; + secretId = "aut"; + workspaceId = "f5d2cff7-c70a-4456-a6d4-36813f16d9f5"; }};; }}; @@ -1958,11 +1965,6 @@ import com.airbyte.api.models.shared.SourceConfluence; import com.airbyte.api.models.shared.SourceConfluenceConfluence; import com.airbyte.api.models.shared.SourceConvex; import com.airbyte.api.models.shared.SourceConvexConvex; -import com.airbyte.api.models.shared.SourceDatadog; -import com.airbyte.api.models.shared.SourceDatadogDatadog; -import com.airbyte.api.models.shared.SourceDatadogQueries; -import com.airbyte.api.models.shared.SourceDatadogQueriesDataSource; -import com.airbyte.api.models.shared.SourceDatadogSite; import com.airbyte.api.models.shared.SourceDatascope; import com.airbyte.api.models.shared.SourceDatascopeDatascope; import com.airbyte.api.models.shared.SourceDelighted; @@ -2090,7 +2092,9 @@ import com.airbyte.api.models.shared.SourceGoogleSearchConsoleAuthorizationOAuth import com.airbyte.api.models.shared.SourceGoogleSearchConsoleAuthorizationOAuthAuthType; import com.airbyte.api.models.shared.SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthentication; import com.airbyte.api.models.shared.SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthenticationAuthType; -import com.airbyte.api.models.shared.SourceGoogleSearchConsoleDataState; +import com.airbyte.api.models.shared.SourceGoogleSearchConsoleCustomReportConfig; +import com.airbyte.api.models.shared.SourceGoogleSearchConsoleCustomReportConfigValidEnums; +import com.airbyte.api.models.shared.SourceGoogleSearchConsoleDataFreshness; import com.airbyte.api.models.shared.SourceGoogleSearchConsoleGoogleSearchConsole; import com.airbyte.api.models.shared.SourceGoogleSheets; import 
com.airbyte.api.models.shared.SourceGoogleSheetsCredentialsAuthenticateViaGoogleOAuth; @@ -2154,7 +2158,7 @@ import com.airbyte.api.models.shared.SourceLeverHiringEnvironment; import com.airbyte.api.models.shared.SourceLeverHiringLeverHiring; import com.airbyte.api.models.shared.SourceLinkedinAds; import com.airbyte.api.models.shared.SourceLinkedinAdsAdAnalyticsReportConfiguration; -import com.airbyte.api.models.shared.SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy; +import com.airbyte.api.models.shared.SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory; import com.airbyte.api.models.shared.SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity; import com.airbyte.api.models.shared.SourceLinkedinAdsCredentialsAccessToken; import com.airbyte.api.models.shared.SourceLinkedinAdsCredentialsAccessTokenAuthMethod; @@ -2214,12 +2218,12 @@ import com.airbyte.api.models.shared.SourceMongodbInternalPocMongodbInternalPoc; import com.airbyte.api.models.shared.SourceMongodbMongodb; import com.airbyte.api.models.shared.SourceMssql; import com.airbyte.api.models.shared.SourceMssqlMssql; -import com.airbyte.api.models.shared.SourceMssqlReplicationMethodLogicalReplicationCDC; -import com.airbyte.api.models.shared.SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync; -import com.airbyte.api.models.shared.SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel; -import com.airbyte.api.models.shared.SourceMssqlReplicationMethodLogicalReplicationCDCMethod; -import com.airbyte.api.models.shared.SourceMssqlReplicationMethodStandard; -import com.airbyte.api.models.shared.SourceMssqlReplicationMethodStandardMethod; +import com.airbyte.api.models.shared.SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC; +import com.airbyte.api.models.shared.SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync; +import 
com.airbyte.api.models.shared.SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel; +import com.airbyte.api.models.shared.SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod; +import com.airbyte.api.models.shared.SourceMssqlReplicationMethodScanChangesWithUserDefinedCursor; +import com.airbyte.api.models.shared.SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod; import com.airbyte.api.models.shared.SourceMssqlSslMethodEncryptedTrustServerCertificate; import com.airbyte.api.models.shared.SourceMssqlSslMethodEncryptedTrustServerCertificateSslMethod; import com.airbyte.api.models.shared.SourceMssqlSslMethodEncryptedVerifyCertificate; @@ -2275,10 +2279,6 @@ import com.airbyte.api.models.shared.SourceOmnisendOmnisend; import com.airbyte.api.models.shared.SourceOnesignal; import com.airbyte.api.models.shared.SourceOnesignalApplications; import com.airbyte.api.models.shared.SourceOnesignalOnesignal; -import com.airbyte.api.models.shared.SourceOpenweather; -import com.airbyte.api.models.shared.SourceOpenweatherLanguage; -import com.airbyte.api.models.shared.SourceOpenweatherOpenweather; -import com.airbyte.api.models.shared.SourceOpenweatherUnits; import com.airbyte.api.models.shared.SourceOracle; import com.airbyte.api.models.shared.SourceOracleConnectionDataServiceName; import com.airbyte.api.models.shared.SourceOracleConnectionDataServiceNameConnectionType; @@ -2343,10 +2343,10 @@ import com.airbyte.api.models.shared.SourcePolygonStockApi; import com.airbyte.api.models.shared.SourcePolygonStockApiPolygonStockApi; import com.airbyte.api.models.shared.SourcePostgres; import com.airbyte.api.models.shared.SourcePostgresPostgres; -import com.airbyte.api.models.shared.SourcePostgresReplicationMethodStandard; -import com.airbyte.api.models.shared.SourcePostgresReplicationMethodStandardMethod; -import com.airbyte.api.models.shared.SourcePostgresReplicationMethodStandardXmin; -import 
com.airbyte.api.models.shared.SourcePostgresReplicationMethodStandardXminMethod; +import com.airbyte.api.models.shared.SourcePostgresReplicationMethodDetectChangesWithXminSystemColumn; +import com.airbyte.api.models.shared.SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod; +import com.airbyte.api.models.shared.SourcePostgresReplicationMethodScanChangesWithUserDefinedCursor; +import com.airbyte.api.models.shared.SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod; import com.airbyte.api.models.shared.SourcePostgresTunnelMethodNoTunnel; import com.airbyte.api.models.shared.SourcePostgresTunnelMethodNoTunnelTunnelMethod; import com.airbyte.api.models.shared.SourcePostgresTunnelMethodPasswordAuthentication; @@ -2359,8 +2359,6 @@ import com.airbyte.api.models.shared.SourcePostmarkapp; import com.airbyte.api.models.shared.SourcePostmarkappPostmarkapp; import com.airbyte.api.models.shared.SourcePrestashop; import com.airbyte.api.models.shared.SourcePrestashopPrestashop; -import com.airbyte.api.models.shared.SourcePublicApis; -import com.airbyte.api.models.shared.SourcePublicApisPublicApis; import com.airbyte.api.models.shared.SourcePunkApi; import com.airbyte.api.models.shared.SourcePunkApiPunkApi; import com.airbyte.api.models.shared.SourcePutRequest; @@ -2391,6 +2389,23 @@ import com.airbyte.api.models.shared.SourceRkiCovidRkiCovid; import com.airbyte.api.models.shared.SourceRss; import com.airbyte.api.models.shared.SourceRssRss; import com.airbyte.api.models.shared.SourceS3; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfig; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatAvroFormat; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatAvroFormatFiletype; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormat; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatFiletype; +import 
com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogenerated; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSV; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvided; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatJsonlFormat; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatParquetFormat; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigFormatParquetFormatFiletype; +import com.airbyte.api.models.shared.SourceS3FileBasedStreamConfigValidationPolicy; import com.airbyte.api.models.shared.SourceS3FormatAvro; import com.airbyte.api.models.shared.SourceS3FormatAvroFiletype; import com.airbyte.api.models.shared.SourceS3FormatCSV; @@ -2559,6 +2574,10 @@ import com.airbyte.api.models.shared.SourceZendeskChatCredentialsOAuth20; import com.airbyte.api.models.shared.SourceZendeskChatCredentialsOAuth20Credentials; import com.airbyte.api.models.shared.SourceZendeskChatZendeskChat; import com.airbyte.api.models.shared.SourceZendeskSunshine; +import com.airbyte.api.models.shared.SourceZendeskSunshineCredentialsAPIToken; +import com.airbyte.api.models.shared.SourceZendeskSunshineCredentialsAPITokenAuthMethod; +import com.airbyte.api.models.shared.SourceZendeskSunshineCredentialsOAuth20; 
+import com.airbyte.api.models.shared.SourceZendeskSunshineCredentialsOAuth20AuthMethod; import com.airbyte.api.models.shared.SourceZendeskSunshineZendeskSunshine; import com.airbyte.api.models.shared.SourceZendeskSupport; import com.airbyte.api.models.shared.SourceZendeskSupportZendeskSupport; @@ -2584,21 +2603,22 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("recusandae") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("sapiente", "quisquam") {{ + password = ""; + username = ""; + }}; }}) .build(); - PutSourceRequest req = new PutSourceRequest("totam") {{ - sourcePutRequest = new SourcePutRequest( new SourceTheGuardianApi("labore", SourceTheGuardianApiTheGuardianApi.THE_GUARDIAN_API, "YYYY-MM-DD") {{ - apiKey = "vel"; - endDate = "YYYY-MM-DD"; - query = "environment AND political"; - section = "technology"; - sourceType = SourceTheGuardianApiTheGuardianApi.THE_GUARDIAN_API; - startDate = "YYYY-MM-DD"; - tag = "environment/plasticbags"; - }}, "possimus");; + PutSourceRequest req = new PutSourceRequest("saepe") {{ + sourcePutRequest = new SourcePutRequest( new SourceKlarna("ea", false, SourceKlarnaRegion.OC, SourceKlarnaKlarna.KLARNA, "consectetur") {{ + password = "impedit"; + playground = false; + region = SourceKlarnaRegion.US; + sourceType = SourceKlarnaKlarna.KLARNA; + username = "Estel30"; + }}, "recusandae");; }}; PutSourceResponse res = sdk.sources.putSource(req); diff --git a/docs/sdks/streams/README.md b/docs/sdks/streams/README.md index b8ef3d7aa..a2912d7e6 100755 --- a/docs/sdks/streams/README.md +++ b/docs/sdks/streams/README.md @@ -22,12 +22,15 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("facilis") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("aspernatur", "minima") {{ + password = ""; + 
username = ""; + }}; }}) .build(); - GetStreamPropertiesRequest req = new GetStreamPropertiesRequest("cum", "commodi") {{ + GetStreamPropertiesRequest req = new GetStreamPropertiesRequest("eaque", "a") {{ ignoreCache = false; }}; diff --git a/docs/sdks/workspaces/README.md b/docs/sdks/workspaces/README.md index 4a2415a8e..dc4fb3886 100755 --- a/docs/sdks/workspaces/README.md +++ b/docs/sdks/workspaces/README.md @@ -106,17 +106,20 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("in") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("libero", "aut") {{ + password = ""; + username = ""; + }}; }}) .build(); - CreateOrUpdateWorkspaceOAuthCredentialsRequest req = new CreateOrUpdateWorkspaceOAuthCredentialsRequest( new WorkspaceOAuthCredentialsRequest(ActorTypeEnum.SOURCE, new ZendeskSupport() {{ - credentials = new ZendeskSupportCredentials() {{ - clientId = "assumenda"; - clientSecret = "nemo"; + CreateOrUpdateWorkspaceOAuthCredentialsRequest req = new CreateOrUpdateWorkspaceOAuthCredentialsRequest( new WorkspaceOAuthCredentialsRequest(ActorTypeEnum.SOURCE, new Notion() {{ + credentials = new NotionCredentials() {{ + clientId = "impedit"; + clientSecret = "aliquam"; }}; - }}, OAuthActorNames.ZENDESK_CHAT);, "aliquid"); + }}, OAuthActorNames.FACEBOOK_PAGES);, "accusamus"); CreateOrUpdateWorkspaceOAuthCredentialsResponse res = sdk.workspaces.createOrUpdateWorkspaceOAuthCredentials(req); @@ -160,12 +163,15 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("aperiam") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("inventore", "non") {{ + password = ""; + username = ""; + }}; }}) .build(); - com.airbyte.api.models.shared.WorkspaceCreateRequest req = new WorkspaceCreateRequest("cum"); + 
com.airbyte.api.models.shared.WorkspaceCreateRequest req = new WorkspaceCreateRequest("et"); CreateWorkspaceResponse res = sdk.workspaces.createWorkspace(req); @@ -209,12 +215,15 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("consectetur") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("dolorum", "laborum") {{ + password = ""; + username = ""; + }}; }}) .build(); - DeleteWorkspaceRequest req = new DeleteWorkspaceRequest("in"); + DeleteWorkspaceRequest req = new DeleteWorkspaceRequest("placeat"); DeleteWorkspaceResponse res = sdk.workspaces.deleteWorkspace(req); @@ -258,12 +267,15 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("exercitationem") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("velit", "eum") {{ + password = ""; + username = ""; + }}; }}) .build(); - GetWorkspaceRequest req = new GetWorkspaceRequest("earum"); + GetWorkspaceRequest req = new GetWorkspaceRequest("autem"); GetWorkspaceResponse res = sdk.workspaces.getWorkspace(req); @@ -307,18 +319,20 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = Airbyte.builder() - .setSecurity(new Security("facere") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("nobis", "quas") {{ + password = ""; + username = ""; + }}; }}) .build(); ListWorkspacesRequest req = new ListWorkspacesRequest() {{ includeDeleted = false; - limit = 257233; - offset = 985492; + limit = 829603; + offset = 860552; workspaceIds = new String[]{{ - add("fbee41f3-3317-4fe3-9b60-eb1ea426555b"), - add("a3c28744-ed53-4b88-b3a8-d8f5c0b2f2fb"), + add("6b144290-7474-4778-a7bd-466d28c10ab3"), }}; }}; @@ -365,12 +379,15 @@ public class Application { public static void main(String[] args) { try { Airbyte sdk = 
Airbyte.builder() - .setSecurity(new Security("voluptate") {{ - bearerAuth = ""; + .setSecurity(new Security() {{ + basicAuth = new SchemeBasicAuth("quo", "illum") {{ + password = ""; + username = ""; + }}; }}) .build(); - UpdateWorkspaceRequest req = new UpdateWorkspaceRequest( new WorkspaceUpdateRequest("expedita");, "ab"); + UpdateWorkspaceRequest req = new UpdateWorkspaceRequest( new WorkspaceUpdateRequest("quo");, "fuga"); UpdateWorkspaceResponse res = sdk.workspaces.updateWorkspace(req); diff --git a/files.gen b/files.gen index cb5bbd862..f51466818 100755 --- a/files.gen +++ b/files.gen @@ -247,6 +247,24 @@ lib/src/main/java/com/airbyte/api/models/shared/DestinationLangchainIndexingPine lib/src/main/java/com/airbyte/api/models/shared/DestinationLangchainIndexingPinecone.java lib/src/main/java/com/airbyte/api/models/shared/DestinationLangchainProcessingConfigModel.java lib/src/main/java/com/airbyte/api/models/shared/DestinationLangchain.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusMilvus.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingFromFieldMode.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingFromField.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingFakeMode.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingFake.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingCohereMode.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingCohere.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingOpenAIMode.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingOpenAI.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthNoAuthMode.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthNoAuth.java 
+lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthUsernamePasswordMode.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthUsernamePassword.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthAPITokenMode.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthAPIToken.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexing.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusProcessingConfigModel.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvus.java lib/src/main/java/com/airbyte/api/models/shared/DestinationMongodbAuthTypeLoginPasswordAuthorization.java lib/src/main/java/com/airbyte/api/models/shared/DestinationMongodbAuthTypeLoginPassword.java lib/src/main/java/com/airbyte/api/models/shared/DestinationMongodbAuthTypeNoneAuthorization.java @@ -293,6 +311,16 @@ lib/src/main/java/com/airbyte/api/models/shared/DestinationOracleTunnelMethodSSH lib/src/main/java/com/airbyte/api/models/shared/DestinationOracleTunnelMethodNoTunnelTunnelMethod.java lib/src/main/java/com/airbyte/api/models/shared/DestinationOracleTunnelMethodNoTunnel.java lib/src/main/java/com/airbyte/api/models/shared/DestinationOracle.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconePinecone.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingFakeMode.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingFake.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingCohereMode.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingCohere.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingOpenAIMode.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingOpenAI.java 
+lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeIndexing.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeProcessingConfigModel.java +lib/src/main/java/com/airbyte/api/models/shared/DestinationPinecone.java lib/src/main/java/com/airbyte/api/models/shared/DestinationPostgresPostgres.java lib/src/main/java/com/airbyte/api/models/shared/DestinationPostgresSslModeVerifyFullMode.java lib/src/main/java/com/airbyte/api/models/shared/DestinationPostgresSslModeVerifyFull.java @@ -537,11 +565,6 @@ lib/src/main/java/com/airbyte/api/models/shared/SourceConfluenceConfluence.java lib/src/main/java/com/airbyte/api/models/shared/SourceConfluence.java lib/src/main/java/com/airbyte/api/models/shared/SourceConvexConvex.java lib/src/main/java/com/airbyte/api/models/shared/SourceConvex.java -lib/src/main/java/com/airbyte/api/models/shared/SourceDatadogQueriesDataSource.java -lib/src/main/java/com/airbyte/api/models/shared/SourceDatadogQueries.java -lib/src/main/java/com/airbyte/api/models/shared/SourceDatadogSite.java -lib/src/main/java/com/airbyte/api/models/shared/SourceDatadogDatadog.java -lib/src/main/java/com/airbyte/api/models/shared/SourceDatadog.java lib/src/main/java/com/airbyte/api/models/shared/SourceDatascopeDatascope.java lib/src/main/java/com/airbyte/api/models/shared/SourceDatascope.java lib/src/main/java/com/airbyte/api/models/shared/SourceDelightedDelighted.java @@ -668,7 +691,9 @@ lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleAuthori lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthentication.java lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleAuthorizationOAuthAuthType.java lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleAuthorizationOAuth.java -lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleDataState.java 
+lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleCustomReportConfigValidEnums.java +lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleCustomReportConfig.java +lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleDataFreshness.java lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleGoogleSearchConsole.java lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsole.java lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSheetsCredentialsServiceAccountKeyAuthenticationAuthType.java @@ -731,7 +756,7 @@ lib/src/main/java/com/airbyte/api/models/shared/SourceLeverHiringCredentialsAuth lib/src/main/java/com/airbyte/api/models/shared/SourceLeverHiringEnvironment.java lib/src/main/java/com/airbyte/api/models/shared/SourceLeverHiringLeverHiring.java lib/src/main/java/com/airbyte/api/models/shared/SourceLeverHiring.java -lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy.java +lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory.java lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity.java lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsAdAnalyticsReportConfiguration.java lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsCredentialsAccessTokenAuthMethod.java @@ -791,12 +816,12 @@ lib/src/main/java/com/airbyte/api/models/shared/SourceMongodbMongodb.java lib/src/main/java/com/airbyte/api/models/shared/SourceMongodb.java lib/src/main/java/com/airbyte/api/models/shared/SourceMongodbInternalPocMongodbInternalPoc.java lib/src/main/java/com/airbyte/api/models/shared/SourceMongodbInternalPoc.java -lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync.java 
-lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCMethod.java -lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel.java -lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDC.java -lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodStandardMethod.java -lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodStandard.java +lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod.java +lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursor.java +lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync.java +lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod.java +lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel.java +lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC.java lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlMssql.java lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlSslMethodEncryptedVerifyCertificateSslMethod.java lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlSslMethodEncryptedVerifyCertificate.java @@ -854,10 +879,6 @@ lib/src/main/java/com/airbyte/api/models/shared/SourceOmnisend.java lib/src/main/java/com/airbyte/api/models/shared/SourceOnesignalApplications.java lib/src/main/java/com/airbyte/api/models/shared/SourceOnesignalOnesignal.java lib/src/main/java/com/airbyte/api/models/shared/SourceOnesignal.java -lib/src/main/java/com/airbyte/api/models/shared/SourceOpenweatherLanguage.java 
-lib/src/main/java/com/airbyte/api/models/shared/SourceOpenweatherOpenweather.java -lib/src/main/java/com/airbyte/api/models/shared/SourceOpenweatherUnits.java -lib/src/main/java/com/airbyte/api/models/shared/SourceOpenweather.java lib/src/main/java/com/airbyte/api/models/shared/SourceOracleConnectionDataSystemIDSIDConnectionType.java lib/src/main/java/com/airbyte/api/models/shared/SourceOracleConnectionDataSystemIDSID.java lib/src/main/java/com/airbyte/api/models/shared/SourceOracleConnectionDataServiceNameConnectionType.java @@ -918,10 +939,10 @@ lib/src/main/java/com/airbyte/api/models/shared/SourcePocketState.java lib/src/main/java/com/airbyte/api/models/shared/SourcePocket.java lib/src/main/java/com/airbyte/api/models/shared/SourcePolygonStockApiPolygonStockApi.java lib/src/main/java/com/airbyte/api/models/shared/SourcePolygonStockApi.java -lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodStandardMethod.java -lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodStandard.java -lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodStandardXminMethod.java -lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodStandardXmin.java +lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod.java +lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursor.java +lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod.java +lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumn.java lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresPostgres.java lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresTunnelMethodPasswordAuthenticationTunnelMethod.java 
lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresTunnelMethodPasswordAuthentication.java @@ -936,8 +957,6 @@ lib/src/main/java/com/airbyte/api/models/shared/SourcePostmarkappPostmarkapp.jav lib/src/main/java/com/airbyte/api/models/shared/SourcePostmarkapp.java lib/src/main/java/com/airbyte/api/models/shared/SourcePrestashopPrestashop.java lib/src/main/java/com/airbyte/api/models/shared/SourcePrestashop.java -lib/src/main/java/com/airbyte/api/models/shared/SourcePublicApisPublicApis.java -lib/src/main/java/com/airbyte/api/models/shared/SourcePublicApis.java lib/src/main/java/com/airbyte/api/models/shared/SourcePunkApiPunkApi.java lib/src/main/java/com/airbyte/api/models/shared/SourcePunkApi.java lib/src/main/java/com/airbyte/api/models/shared/SourcePypiPypi.java @@ -977,6 +996,23 @@ lib/src/main/java/com/airbyte/api/models/shared/SourceS3FormatCSVFiletype.java lib/src/main/java/com/airbyte/api/models/shared/SourceS3FormatCSV.java lib/src/main/java/com/airbyte/api/models/shared/SourceS3S3AmazonWebServices.java lib/src/main/java/com/airbyte/api/models/shared/SourceS3S3.java +lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormatFiletype.java +lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormat.java +lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype.java +lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormat.java +lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatFiletype.java +lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType.java +lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvided.java 
+lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType.java +lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogenerated.java +lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType.java +lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSV.java +lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType.java +lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormat.java +lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormatFiletype.java +lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormat.java +lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigValidationPolicy.java +lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfig.java lib/src/main/java/com/airbyte/api/models/shared/SourceS3.java lib/src/main/java/com/airbyte/api/models/shared/SourceSalesforceAuthType.java lib/src/main/java/com/airbyte/api/models/shared/SourceSalesforceSalesforce.java @@ -1010,10 +1046,10 @@ lib/src/main/java/com/airbyte/api/models/shared/SourceSftp.java lib/src/main/java/com/airbyte/api/models/shared/SourceSftpBulkFileType.java lib/src/main/java/com/airbyte/api/models/shared/SourceSftpBulkSftpBulk.java lib/src/main/java/com/airbyte/api/models/shared/SourceSftpBulk.java -lib/src/main/java/com/airbyte/api/models/shared/SourceShopifyCredentialsOAuth20AuthMethod.java -lib/src/main/java/com/airbyte/api/models/shared/SourceShopifyCredentialsOAuth20.java lib/src/main/java/com/airbyte/api/models/shared/SourceShopifyCredentialsAPIPasswordAuthMethod.java 
lib/src/main/java/com/airbyte/api/models/shared/SourceShopifyCredentialsAPIPassword.java +lib/src/main/java/com/airbyte/api/models/shared/SourceShopifyCredentialsOAuth20AuthMethod.java +lib/src/main/java/com/airbyte/api/models/shared/SourceShopifyCredentialsOAuth20.java lib/src/main/java/com/airbyte/api/models/shared/SourceShopifyShopify.java lib/src/main/java/com/airbyte/api/models/shared/SourceShopify.java lib/src/main/java/com/airbyte/api/models/shared/SourceShortioShortio.java @@ -1134,6 +1170,10 @@ lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskChatCredentialsOAut lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskChatCredentialsOAuth20.java lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskChatZendeskChat.java lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskChat.java +lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineCredentialsAPITokenAuthMethod.java +lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineCredentialsAPIToken.java +lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineCredentialsOAuth20AuthMethod.java +lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineCredentialsOAuth20.java lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineZendeskSunshine.java lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshine.java lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSupportZendeskSupport.java @@ -1243,6 +1283,7 @@ lib/src/main/java/com/airbyte/api/models/shared/WorkspaceResponse.java lib/src/main/java/com/airbyte/api/models/shared/WorkspaceCreateRequest.java lib/src/main/java/com/airbyte/api/models/shared/WorkspacesResponse.java lib/src/main/java/com/airbyte/api/models/shared/WorkspaceUpdateRequest.java +lib/src/main/java/com/airbyte/api/models/shared/SchemeBasicAuth.java lib/src/main/java/com/airbyte/api/models/shared/Security.java docs/sdks/airbyte/README.md docs/sdks/connections/README.md @@ 
-1464,6 +1505,24 @@ docs/models/shared/DestinationLangchainIndexingPineconeMode.md docs/models/shared/DestinationLangchainIndexingPinecone.md docs/models/shared/DestinationLangchainProcessingConfigModel.md docs/models/shared/DestinationLangchain.md +docs/models/shared/DestinationMilvusMilvus.md +docs/models/shared/DestinationMilvusEmbeddingFromFieldMode.md +docs/models/shared/DestinationMilvusEmbeddingFromField.md +docs/models/shared/DestinationMilvusEmbeddingFakeMode.md +docs/models/shared/DestinationMilvusEmbeddingFake.md +docs/models/shared/DestinationMilvusEmbeddingCohereMode.md +docs/models/shared/DestinationMilvusEmbeddingCohere.md +docs/models/shared/DestinationMilvusEmbeddingOpenAIMode.md +docs/models/shared/DestinationMilvusEmbeddingOpenAI.md +docs/models/shared/DestinationMilvusIndexingAuthNoAuthMode.md +docs/models/shared/DestinationMilvusIndexingAuthNoAuth.md +docs/models/shared/DestinationMilvusIndexingAuthUsernamePasswordMode.md +docs/models/shared/DestinationMilvusIndexingAuthUsernamePassword.md +docs/models/shared/DestinationMilvusIndexingAuthAPITokenMode.md +docs/models/shared/DestinationMilvusIndexingAuthAPIToken.md +docs/models/shared/DestinationMilvusIndexing.md +docs/models/shared/DestinationMilvusProcessingConfigModel.md +docs/models/shared/DestinationMilvus.md docs/models/shared/DestinationMongodbAuthTypeLoginPasswordAuthorization.md docs/models/shared/DestinationMongodbAuthTypeLoginPassword.md docs/models/shared/DestinationMongodbAuthTypeNoneAuthorization.md @@ -1510,6 +1569,16 @@ docs/models/shared/DestinationOracleTunnelMethodSSHKeyAuthentication.md docs/models/shared/DestinationOracleTunnelMethodNoTunnelTunnelMethod.md docs/models/shared/DestinationOracleTunnelMethodNoTunnel.md docs/models/shared/DestinationOracle.md +docs/models/shared/DestinationPineconePinecone.md +docs/models/shared/DestinationPineconeEmbeddingFakeMode.md +docs/models/shared/DestinationPineconeEmbeddingFake.md 
+docs/models/shared/DestinationPineconeEmbeddingCohereMode.md +docs/models/shared/DestinationPineconeEmbeddingCohere.md +docs/models/shared/DestinationPineconeEmbeddingOpenAIMode.md +docs/models/shared/DestinationPineconeEmbeddingOpenAI.md +docs/models/shared/DestinationPineconeIndexing.md +docs/models/shared/DestinationPineconeProcessingConfigModel.md +docs/models/shared/DestinationPinecone.md docs/models/shared/DestinationPostgresPostgres.md docs/models/shared/DestinationPostgresSslModeVerifyFullMode.md docs/models/shared/DestinationPostgresSslModeVerifyFull.md @@ -1754,11 +1823,6 @@ docs/models/shared/SourceConfluenceConfluence.md docs/models/shared/SourceConfluence.md docs/models/shared/SourceConvexConvex.md docs/models/shared/SourceConvex.md -docs/models/shared/SourceDatadogQueriesDataSource.md -docs/models/shared/SourceDatadogQueries.md -docs/models/shared/SourceDatadogSite.md -docs/models/shared/SourceDatadogDatadog.md -docs/models/shared/SourceDatadog.md docs/models/shared/SourceDatascopeDatascope.md docs/models/shared/SourceDatascope.md docs/models/shared/SourceDelightedDelighted.md @@ -1885,7 +1949,9 @@ docs/models/shared/SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthen docs/models/shared/SourceGoogleSearchConsoleAuthorizationServiceAccountKeyAuthentication.md docs/models/shared/SourceGoogleSearchConsoleAuthorizationOAuthAuthType.md docs/models/shared/SourceGoogleSearchConsoleAuthorizationOAuth.md -docs/models/shared/SourceGoogleSearchConsoleDataState.md +docs/models/shared/SourceGoogleSearchConsoleCustomReportConfigValidEnums.md +docs/models/shared/SourceGoogleSearchConsoleCustomReportConfig.md +docs/models/shared/SourceGoogleSearchConsoleDataFreshness.md docs/models/shared/SourceGoogleSearchConsoleGoogleSearchConsole.md docs/models/shared/SourceGoogleSearchConsole.md docs/models/shared/SourceGoogleSheetsCredentialsServiceAccountKeyAuthenticationAuthType.md @@ -1948,7 +2014,7 @@ 
docs/models/shared/SourceLeverHiringCredentialsAuthenticateViaLeverOAuth.md docs/models/shared/SourceLeverHiringEnvironment.md docs/models/shared/SourceLeverHiringLeverHiring.md docs/models/shared/SourceLeverHiring.md -docs/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy.md +docs/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory.md docs/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity.md docs/models/shared/SourceLinkedinAdsAdAnalyticsReportConfiguration.md docs/models/shared/SourceLinkedinAdsCredentialsAccessTokenAuthMethod.md @@ -2008,12 +2074,12 @@ docs/models/shared/SourceMongodbMongodb.md docs/models/shared/SourceMongodb.md docs/models/shared/SourceMongodbInternalPocMongodbInternalPoc.md docs/models/shared/SourceMongodbInternalPoc.md -docs/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync.md -docs/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCMethod.md -docs/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel.md -docs/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDC.md -docs/models/shared/SourceMssqlReplicationMethodStandardMethod.md -docs/models/shared/SourceMssqlReplicationMethodStandard.md +docs/models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod.md +docs/models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursor.md +docs/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync.md +docs/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod.md +docs/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel.md +docs/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC.md docs/models/shared/SourceMssqlMssql.md docs/models/shared/SourceMssqlSslMethodEncryptedVerifyCertificateSslMethod.md 
docs/models/shared/SourceMssqlSslMethodEncryptedVerifyCertificate.md @@ -2071,10 +2137,6 @@ docs/models/shared/SourceOmnisend.md docs/models/shared/SourceOnesignalApplications.md docs/models/shared/SourceOnesignalOnesignal.md docs/models/shared/SourceOnesignal.md -docs/models/shared/SourceOpenweatherLanguage.md -docs/models/shared/SourceOpenweatherOpenweather.md -docs/models/shared/SourceOpenweatherUnits.md -docs/models/shared/SourceOpenweather.md docs/models/shared/SourceOracleConnectionDataSystemIDSIDConnectionType.md docs/models/shared/SourceOracleConnectionDataSystemIDSID.md docs/models/shared/SourceOracleConnectionDataServiceNameConnectionType.md @@ -2135,10 +2197,10 @@ docs/models/shared/SourcePocketState.md docs/models/shared/SourcePocket.md docs/models/shared/SourcePolygonStockApiPolygonStockApi.md docs/models/shared/SourcePolygonStockApi.md -docs/models/shared/SourcePostgresReplicationMethodStandardMethod.md -docs/models/shared/SourcePostgresReplicationMethodStandard.md -docs/models/shared/SourcePostgresReplicationMethodStandardXminMethod.md -docs/models/shared/SourcePostgresReplicationMethodStandardXmin.md +docs/models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod.md +docs/models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursor.md +docs/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod.md +docs/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumn.md docs/models/shared/SourcePostgresPostgres.md docs/models/shared/SourcePostgresTunnelMethodPasswordAuthenticationTunnelMethod.md docs/models/shared/SourcePostgresTunnelMethodPasswordAuthentication.md @@ -2153,8 +2215,6 @@ docs/models/shared/SourcePostmarkappPostmarkapp.md docs/models/shared/SourcePostmarkapp.md docs/models/shared/SourcePrestashopPrestashop.md docs/models/shared/SourcePrestashop.md -docs/models/shared/SourcePublicApisPublicApis.md -docs/models/shared/SourcePublicApis.md 
docs/models/shared/SourcePunkApiPunkApi.md docs/models/shared/SourcePunkApi.md docs/models/shared/SourcePypiPypi.md @@ -2194,6 +2254,23 @@ docs/models/shared/SourceS3FormatCSVFiletype.md docs/models/shared/SourceS3FormatCSV.md docs/models/shared/SourceS3S3AmazonWebServices.md docs/models/shared/SourceS3S3.md +docs/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormatFiletype.md +docs/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormat.md +docs/models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype.md +docs/models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormat.md +docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatFiletype.md +docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType.md +docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvided.md +docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType.md +docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogenerated.md +docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType.md +docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSV.md +docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType.md +docs/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormat.md +docs/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormatFiletype.md +docs/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormat.md +docs/models/shared/SourceS3FileBasedStreamConfigValidationPolicy.md +docs/models/shared/SourceS3FileBasedStreamConfig.md docs/models/shared/SourceS3.md docs/models/shared/SourceSalesforceAuthType.md docs/models/shared/SourceSalesforceSalesforce.md @@ -2227,10 +2304,10 @@ docs/models/shared/SourceSftp.md docs/models/shared/SourceSftpBulkFileType.md 
docs/models/shared/SourceSftpBulkSftpBulk.md docs/models/shared/SourceSftpBulk.md -docs/models/shared/SourceShopifyCredentialsOAuth20AuthMethod.md -docs/models/shared/SourceShopifyCredentialsOAuth20.md docs/models/shared/SourceShopifyCredentialsAPIPasswordAuthMethod.md docs/models/shared/SourceShopifyCredentialsAPIPassword.md +docs/models/shared/SourceShopifyCredentialsOAuth20AuthMethod.md +docs/models/shared/SourceShopifyCredentialsOAuth20.md docs/models/shared/SourceShopifyShopify.md docs/models/shared/SourceShopify.md docs/models/shared/SourceShortioShortio.md @@ -2351,6 +2428,10 @@ docs/models/shared/SourceZendeskChatCredentialsOAuth20Credentials.md docs/models/shared/SourceZendeskChatCredentialsOAuth20.md docs/models/shared/SourceZendeskChatZendeskChat.md docs/models/shared/SourceZendeskChat.md +docs/models/shared/SourceZendeskSunshineCredentialsAPITokenAuthMethod.md +docs/models/shared/SourceZendeskSunshineCredentialsAPIToken.md +docs/models/shared/SourceZendeskSunshineCredentialsOAuth20AuthMethod.md +docs/models/shared/SourceZendeskSunshineCredentialsOAuth20.md docs/models/shared/SourceZendeskSunshineZendeskSunshine.md docs/models/shared/SourceZendeskSunshine.md docs/models/shared/SourceZendeskSupportZendeskSupport.md @@ -2460,4 +2541,5 @@ docs/models/shared/WorkspaceResponse.md docs/models/shared/WorkspaceCreateRequest.md docs/models/shared/WorkspacesResponse.md docs/models/shared/WorkspaceUpdateRequest.md +docs/models/shared/SchemeBasicAuth.md docs/models/shared/Security.md \ No newline at end of file diff --git a/gen.yaml b/gen.yaml index 214945836..6e09d526e 100644 --- a/gen.yaml +++ b/gen.yaml @@ -1,9 +1,9 @@ configVersion: 1.0.0 management: - docChecksum: 677c33932664e2e3ad3b815e949e0aa9 + docChecksum: 5b9a9a6f493e89e080a57e42997074be docVersion: 1.0.0 - speakeasyVersion: 1.77.2 - generationVersion: 2.93.0 + speakeasyVersion: 1.82.5 + generationVersion: 2.108.3 generation: comments: disableComments: false @@ -20,7 +20,7 @@ features: globalServerURLs: 
2.82.0 includes: 2.81.1 java: - version: 0.39.1 + version: 0.39.2 artifactID: api companyEmail: info@airbyte.com companyName: Airbyte diff --git a/lib/build.gradle b/lib/build.gradle index 922231ba5..68cd241ab 100755 --- a/lib/build.gradle +++ b/lib/build.gradle @@ -41,7 +41,7 @@ publishing { maven(MavenPublication) { groupId = 'com.airbyte' artifactId = 'api' - version = '0.39.1' + version = '0.39.2' from components.java diff --git a/lib/src/main/java/com/airbyte/api/Airbyte.java b/lib/src/main/java/com/airbyte/api/Airbyte.java index 55bd5818a..d18b4463b 100755 --- a/lib/src/main/java/com/airbyte/api/Airbyte.java +++ b/lib/src/main/java/com/airbyte/api/Airbyte.java @@ -7,6 +7,9 @@ import com.airbyte.api.utils.HTTPClient; import com.airbyte.api.utils.SpeakeasyHTTPClient; +/** + * airbyte-api: Programatically control Airbyte Cloud, OSS & Enterprise. + */ public class Airbyte { /** * SERVERS contains the list of server urls available to the SDK. diff --git a/lib/src/main/java/com/airbyte/api/SDKConfiguration.java b/lib/src/main/java/com/airbyte/api/SDKConfiguration.java index ec9e7e3cf..5eb4e588d 100755 --- a/lib/src/main/java/com/airbyte/api/SDKConfiguration.java +++ b/lib/src/main/java/com/airbyte/api/SDKConfiguration.java @@ -15,7 +15,7 @@ class SDKConfiguration { public int serverIdx = 0; public String language = "java"; public String openapiDocVersion = "1.0.0"; - public String sdkVersion = "0.39.1"; - public String genVersion = "2.93.0"; + public String sdkVersion = "0.39.2"; + public String genVersion = "2.108.3"; } \ No newline at end of file diff --git a/lib/src/main/java/com/airbyte/api/models/shared/ConnectionsResponse.java b/lib/src/main/java/com/airbyte/api/models/shared/ConnectionsResponse.java index 827781c39..6eb3aaa9c 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/ConnectionsResponse.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/ConnectionsResponse.java @@ -8,9 +8,6 @@ import com.fasterxml.jackson.annotation.JsonInclude; 
import com.fasterxml.jackson.annotation.JsonProperty; -/** - * ConnectionsResponse - Successful operation - */ public class ConnectionsResponse { @JsonProperty("data") diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationBigquery.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationBigquery.java index d2a296661..7d4905596 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/DestinationBigquery.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationBigquery.java @@ -91,7 +91,7 @@ public DestinationBigquery withProjectId(String projectId) { } /** - * (Early Access) The dataset to write raw tables into + * The dataset to write raw tables into */ @JsonInclude(Include.NON_ABSENT) @JsonProperty("raw_data_dataset") @@ -114,18 +114,6 @@ public DestinationBigquery withTransformationPriority(DestinationBigqueryTransfo return this; } - /** - * (Early Access) Use <a href="https://docs.airbyte.com/understanding-airbyte/typing-deduping" target="_blank">Destinations V2</a>. - */ - @JsonInclude(Include.NON_ABSENT) - @JsonProperty("use_1s1t_format") - public Boolean use1s1tFormat; - - public DestinationBigquery withUse1s1tFormat(Boolean use1s1tFormat) { - this.use1s1tFormat = use1s1tFormat; - return this; - } - public DestinationBigquery(@JsonProperty("dataset_id") String datasetId, @JsonProperty("dataset_location") DestinationBigqueryDatasetLocation datasetLocation, @JsonProperty("destinationType") DestinationBigqueryBigquery destinationType, @JsonProperty("project_id") String projectId) { this.datasetId = datasetId; this.datasetLocation = datasetLocation; diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvus.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvus.java new file mode 100755 index 000000000..cbb46b2d7 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvus.java @@ -0,0 +1,58 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * DestinationMilvus - The values required to configure the destination. + */ + +public class DestinationMilvus { + @JsonProperty("destinationType") + public DestinationMilvusMilvus destinationType; + + public DestinationMilvus withDestinationType(DestinationMilvusMilvus destinationType) { + this.destinationType = destinationType; + return this; + } + + /** + * Embedding configuration + */ + @JsonProperty("embedding") + public Object embedding; + + public DestinationMilvus withEmbedding(Object embedding) { + this.embedding = embedding; + return this; + } + + /** + * Indexing configuration + */ + @JsonProperty("indexing") + public DestinationMilvusIndexing indexing; + + public DestinationMilvus withIndexing(DestinationMilvusIndexing indexing) { + this.indexing = indexing; + return this; + } + + @JsonProperty("processing") + public DestinationMilvusProcessingConfigModel processing; + + public DestinationMilvus withProcessing(DestinationMilvusProcessingConfigModel processing) { + this.processing = processing; + return this; + } + + public DestinationMilvus(@JsonProperty("destinationType") DestinationMilvusMilvus destinationType, @JsonProperty("embedding") Object embedding, @JsonProperty("indexing") DestinationMilvusIndexing indexing, @JsonProperty("processing") DestinationMilvusProcessingConfigModel processing) { + this.destinationType = destinationType; + this.embedding = embedding; + this.indexing = indexing; + this.processing = processing; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingCohere.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingCohere.java new file mode 100755 index 000000000..078df9e32 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingCohere.java @@ -0,0 +1,36 @@ +/* + * Code generated by Speakeasy 
(https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * DestinationMilvusEmbeddingCohere - Use the Cohere API to embed text. + */ + +public class DestinationMilvusEmbeddingCohere { + @JsonProperty("cohere_key") + public String cohereKey; + + public DestinationMilvusEmbeddingCohere withCohereKey(String cohereKey) { + this.cohereKey = cohereKey; + return this; + } + + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("mode") + public DestinationMilvusEmbeddingCohereMode mode; + + public DestinationMilvusEmbeddingCohere withMode(DestinationMilvusEmbeddingCohereMode mode) { + this.mode = mode; + return this; + } + + public DestinationMilvusEmbeddingCohere(@JsonProperty("cohere_key") String cohereKey) { + this.cohereKey = cohereKey; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodStandardMethod.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingCohereMode.java similarity index 62% rename from lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodStandardMethod.java rename to lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingCohereMode.java index a75851ecf..37103b5a9 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodStandardMethod.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingCohereMode.java @@ -6,13 +6,13 @@ import com.fasterxml.jackson.annotation.JsonValue; -public enum SourceMssqlReplicationMethodStandardMethod { - STANDARD("STANDARD"); +public enum DestinationMilvusEmbeddingCohereMode { + COHERE("cohere"); @JsonValue public final String value; - private SourceMssqlReplicationMethodStandardMethod(String value) { + private 
DestinationMilvusEmbeddingCohereMode(String value) { this.value = value; } } diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingFake.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingFake.java new file mode 100755 index 000000000..132479ddc --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingFake.java @@ -0,0 +1,26 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * DestinationMilvusEmbeddingFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. + */ + +public class DestinationMilvusEmbeddingFake { + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("mode") + public DestinationMilvusEmbeddingFakeMode mode; + + public DestinationMilvusEmbeddingFake withMode(DestinationMilvusEmbeddingFakeMode mode) { + this.mode = mode; + return this; + } + + public DestinationMilvusEmbeddingFake(){} +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceOpenweatherOpenweather.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingFakeMode.java similarity index 66% rename from lib/src/main/java/com/airbyte/api/models/shared/SourceOpenweatherOpenweather.java rename to lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingFakeMode.java index 4dbe70ff9..d956a897d 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceOpenweatherOpenweather.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingFakeMode.java @@ -6,13 +6,13 @@ import com.fasterxml.jackson.annotation.JsonValue; -public enum SourceOpenweatherOpenweather { - 
OPENWEATHER("openweather"); +public enum DestinationMilvusEmbeddingFakeMode { + FAKE("fake"); @JsonValue public final String value; - private SourceOpenweatherOpenweather(String value) { + private DestinationMilvusEmbeddingFakeMode(String value) { this.value = value; } } diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingFromField.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingFromField.java new file mode 100755 index 000000000..cb2337c70 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingFromField.java @@ -0,0 +1,51 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * DestinationMilvusEmbeddingFromField - Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store. 
+ */ + +public class DestinationMilvusEmbeddingFromField { + /** + * The number of dimensions the embedding model is generating + */ + @JsonProperty("dimensions") + public Long dimensions; + + public DestinationMilvusEmbeddingFromField withDimensions(Long dimensions) { + this.dimensions = dimensions; + return this; + } + + /** + * Name of the field in the record that contains the embedding + */ + @JsonProperty("field_name") + public String fieldName; + + public DestinationMilvusEmbeddingFromField withFieldName(String fieldName) { + this.fieldName = fieldName; + return this; + } + + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("mode") + public DestinationMilvusEmbeddingFromFieldMode mode; + + public DestinationMilvusEmbeddingFromField withMode(DestinationMilvusEmbeddingFromFieldMode mode) { + this.mode = mode; + return this; + } + + public DestinationMilvusEmbeddingFromField(@JsonProperty("dimensions") Long dimensions, @JsonProperty("field_name") String fieldName) { + this.dimensions = dimensions; + this.fieldName = fieldName; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingFromFieldMode.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingFromFieldMode.java new file mode 100755 index 000000000..287ffdae1 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingFromFieldMode.java @@ -0,0 +1,18 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +public enum DestinationMilvusEmbeddingFromFieldMode { + FROM_FIELD("from_field"); + + @JsonValue + public final String value; + + private DestinationMilvusEmbeddingFromFieldMode(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingOpenAI.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingOpenAI.java new file mode 100755 index 000000000..d9555e209 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingOpenAI.java @@ -0,0 +1,36 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * DestinationMilvusEmbeddingOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. 
+ */ + +public class DestinationMilvusEmbeddingOpenAI { + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("mode") + public DestinationMilvusEmbeddingOpenAIMode mode; + + public DestinationMilvusEmbeddingOpenAI withMode(DestinationMilvusEmbeddingOpenAIMode mode) { + this.mode = mode; + return this; + } + + @JsonProperty("openai_key") + public String openaiKey; + + public DestinationMilvusEmbeddingOpenAI withOpenaiKey(String openaiKey) { + this.openaiKey = openaiKey; + return this; + } + + public DestinationMilvusEmbeddingOpenAI(@JsonProperty("openai_key") String openaiKey) { + this.openaiKey = openaiKey; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingOpenAIMode.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingOpenAIMode.java new file mode 100755 index 000000000..1d6faac51 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusEmbeddingOpenAIMode.java @@ -0,0 +1,18 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +public enum DestinationMilvusEmbeddingOpenAIMode { + OPENAI("openai"); + + @JsonValue + public final String value; + + private DestinationMilvusEmbeddingOpenAIMode(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexing.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexing.java new file mode 100755 index 000000000..e3a4a3f2c --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexing.java @@ -0,0 +1,90 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * DestinationMilvusIndexing - Indexing configuration + */ + +public class DestinationMilvusIndexing { + /** + * Authentication method + */ + @JsonProperty("auth") + public Object auth; + + public DestinationMilvusIndexing withAuth(Object auth) { + this.auth = auth; + return this; + } + + /** + * The collection to load data into + */ + @JsonProperty("collection") + public String collection; + + public DestinationMilvusIndexing withCollection(String collection) { + this.collection = collection; + return this; + } + + /** + * The database to connect to + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("db") + public String db; + + public DestinationMilvusIndexing withDb(String db) { + this.db = db; + return this; + } + + /** + * The public endpoint of the Milvus instance. 
+ */ + @JsonProperty("host") + public String host; + + public DestinationMilvusIndexing withHost(String host) { + this.host = host; + return this; + } + + /** + * The field in the entity that contains the embedded text + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("text_field") + public String textField; + + public DestinationMilvusIndexing withTextField(String textField) { + this.textField = textField; + return this; + } + + /** + * The field in the entity that contains the vector + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("vector_field") + public String vectorField; + + public DestinationMilvusIndexing withVectorField(String vectorField) { + this.vectorField = vectorField; + return this; + } + + public DestinationMilvusIndexing(@JsonProperty("auth") Object auth, @JsonProperty("collection") String collection, @JsonProperty("host") String host) { + this.auth = auth; + this.collection = collection; + this.host = host; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthAPIToken.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthAPIToken.java new file mode 100755 index 000000000..d505a380a --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthAPIToken.java @@ -0,0 +1,39 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * DestinationMilvusIndexingAuthAPIToken - Authenticate using an API token (suitable for Zilliz Cloud) + */ + +public class DestinationMilvusIndexingAuthAPIToken { + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("mode") + public DestinationMilvusIndexingAuthAPITokenMode mode; + + public DestinationMilvusIndexingAuthAPIToken withMode(DestinationMilvusIndexingAuthAPITokenMode mode) { + this.mode = mode; + return this; + } + + /** + * API Token for the Milvus instance + */ + @JsonProperty("token") + public String token; + + public DestinationMilvusIndexingAuthAPIToken withToken(String token) { + this.token = token; + return this; + } + + public DestinationMilvusIndexingAuthAPIToken(@JsonProperty("token") String token) { + this.token = token; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthAPITokenMode.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthAPITokenMode.java new file mode 100755 index 000000000..35699ea8c --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthAPITokenMode.java @@ -0,0 +1,18 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +public enum DestinationMilvusIndexingAuthAPITokenMode { + TOKEN("token"); + + @JsonValue + public final String value; + + private DestinationMilvusIndexingAuthAPITokenMode(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthNoAuth.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthNoAuth.java new file mode 100755 index 000000000..fea59e8ce --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthNoAuth.java @@ -0,0 +1,26 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * DestinationMilvusIndexingAuthNoAuth - Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses) + */ + +public class DestinationMilvusIndexingAuthNoAuth { + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("mode") + public DestinationMilvusIndexingAuthNoAuthMode mode; + + public DestinationMilvusIndexingAuthNoAuth withMode(DestinationMilvusIndexingAuthNoAuthMode mode) { + this.mode = mode; + return this; + } + + public DestinationMilvusIndexingAuthNoAuth(){} +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthNoAuthMode.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthNoAuthMode.java new file mode 100755 index 000000000..78f63548d --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthNoAuthMode.java @@ -0,0 +1,18 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +public enum DestinationMilvusIndexingAuthNoAuthMode { + NO_AUTH("no_auth"); + + @JsonValue + public final String value; + + private DestinationMilvusIndexingAuthNoAuthMode(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthUsernamePassword.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthUsernamePassword.java new file mode 100755 index 000000000..a566fa3cd --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthUsernamePassword.java @@ -0,0 +1,51 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * DestinationMilvusIndexingAuthUsernamePassword - Authenticate using username and password (suitable for self-managed Milvus clusters) + */ + +public class DestinationMilvusIndexingAuthUsernamePassword { + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("mode") + public DestinationMilvusIndexingAuthUsernamePasswordMode mode; + + public DestinationMilvusIndexingAuthUsernamePassword withMode(DestinationMilvusIndexingAuthUsernamePasswordMode mode) { + this.mode = mode; + return this; + } + + /** + * Password for the Milvus instance + */ + @JsonProperty("password") + public String password; + + public DestinationMilvusIndexingAuthUsernamePassword withPassword(String password) { + this.password = password; + return this; + } + + /** + * Username for the Milvus instance + */ + @JsonProperty("username") + public String username; + + public DestinationMilvusIndexingAuthUsernamePassword withUsername(String username) { + this.username = username; + return this; + } + + 
public DestinationMilvusIndexingAuthUsernamePassword(@JsonProperty("username") String username, @JsonProperty("password") String password) { + this.username = username; + this.password = password; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthUsernamePasswordMode.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthUsernamePasswordMode.java new file mode 100755 index 000000000..e7738c934 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusIndexingAuthUsernamePasswordMode.java @@ -0,0 +1,18 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +public enum DestinationMilvusIndexingAuthUsernamePasswordMode { + USERNAME_PASSWORD("username_password"); + + @JsonValue + public final String value; + + private DestinationMilvusIndexingAuthUsernamePasswordMode(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceDatadogDatadog.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusMilvus.java similarity index 69% rename from lib/src/main/java/com/airbyte/api/models/shared/SourceDatadogDatadog.java rename to lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusMilvus.java index 36dee8f60..9a23d00cc 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceDatadogDatadog.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusMilvus.java @@ -6,13 +6,13 @@ import com.fasterxml.jackson.annotation.JsonValue; -public enum SourceDatadogDatadog { - DATADOG("datadog"); +public enum DestinationMilvusMilvus { + MILVUS("milvus"); @JsonValue public final String value; - private SourceDatadogDatadog(String value) { + private DestinationMilvusMilvus(String value) { this.value = value; } } diff --git 
a/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusProcessingConfigModel.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusProcessingConfigModel.java new file mode 100755 index 000000000..26fe73dbc --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationMilvusProcessingConfigModel.java @@ -0,0 +1,63 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + + +public class DestinationMilvusProcessingConfigModel { + /** + * Size of overlap between chunks in tokens to store in vector store to better capture relevant context + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("chunk_overlap") + public Long chunkOverlap; + + public DestinationMilvusProcessingConfigModel withChunkOverlap(Long chunkOverlap) { + this.chunkOverlap = chunkOverlap; + return this; + } + + /** + * Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM) + */ + @JsonProperty("chunk_size") + public Long chunkSize; + + public DestinationMilvusProcessingConfigModel withChunkSize(Long chunkSize) { + this.chunkSize = chunkSize; + return this; + } + + /** + * List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. 
When specifying nested paths, all matching values are flattened into an array set to a field named by the path. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("metadata_fields") + public String[] metadataFields; + + public DestinationMilvusProcessingConfigModel withMetadataFields(String[] metadataFields) { + this.metadataFields = metadataFields; + return this; + } + + /** + * List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("text_fields") + public String[] textFields; + + public DestinationMilvusProcessingConfigModel withTextFields(String[] textFields) { + this.textFields = textFields; + return this; + } + + public DestinationMilvusProcessingConfigModel(@JsonProperty("chunk_size") Long chunkSize) { + this.chunkSize = chunkSize; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationPinecone.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPinecone.java new file mode 100755 index 000000000..b7895752d --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPinecone.java @@ -0,0 +1,58 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * DestinationPinecone - The values required to configure the destination. 
+ */ + +public class DestinationPinecone { + @JsonProperty("destinationType") + public DestinationPineconePinecone destinationType; + + public DestinationPinecone withDestinationType(DestinationPineconePinecone destinationType) { + this.destinationType = destinationType; + return this; + } + + /** + * Embedding configuration + */ + @JsonProperty("embedding") + public Object embedding; + + public DestinationPinecone withEmbedding(Object embedding) { + this.embedding = embedding; + return this; + } + + /** + * Pinecone is a popular vector store that can be used to store and retrieve embeddings. + */ + @JsonProperty("indexing") + public DestinationPineconeIndexing indexing; + + public DestinationPinecone withIndexing(DestinationPineconeIndexing indexing) { + this.indexing = indexing; + return this; + } + + @JsonProperty("processing") + public DestinationPineconeProcessingConfigModel processing; + + public DestinationPinecone withProcessing(DestinationPineconeProcessingConfigModel processing) { + this.processing = processing; + return this; + } + + public DestinationPinecone(@JsonProperty("destinationType") DestinationPineconePinecone destinationType, @JsonProperty("embedding") Object embedding, @JsonProperty("indexing") DestinationPineconeIndexing indexing, @JsonProperty("processing") DestinationPineconeProcessingConfigModel processing) { + this.destinationType = destinationType; + this.embedding = embedding; + this.indexing = indexing; + this.processing = processing; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingCohere.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingCohere.java new file mode 100755 index 000000000..72018dd45 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingCohere.java @@ -0,0 +1,36 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * DestinationPineconeEmbeddingCohere - Use the Cohere API to embed text. + */ + +public class DestinationPineconeEmbeddingCohere { + @JsonProperty("cohere_key") + public String cohereKey; + + public DestinationPineconeEmbeddingCohere withCohereKey(String cohereKey) { + this.cohereKey = cohereKey; + return this; + } + + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("mode") + public DestinationPineconeEmbeddingCohereMode mode; + + public DestinationPineconeEmbeddingCohere withMode(DestinationPineconeEmbeddingCohereMode mode) { + this.mode = mode; + return this; + } + + public DestinationPineconeEmbeddingCohere(@JsonProperty("cohere_key") String cohereKey) { + this.cohereKey = cohereKey; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingCohereMode.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingCohereMode.java new file mode 100755 index 000000000..42230f367 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingCohereMode.java @@ -0,0 +1,18 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +public enum DestinationPineconeEmbeddingCohereMode { + COHERE("cohere"); + + @JsonValue + public final String value; + + private DestinationPineconeEmbeddingCohereMode(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingFake.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingFake.java new file mode 100755 index 000000000..d8885368a --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingFake.java @@ -0,0 +1,26 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * DestinationPineconeEmbeddingFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. + */ + +public class DestinationPineconeEmbeddingFake { + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("mode") + public DestinationPineconeEmbeddingFakeMode mode; + + public DestinationPineconeEmbeddingFake withMode(DestinationPineconeEmbeddingFakeMode mode) { + this.mode = mode; + return this; + } + + public DestinationPineconeEmbeddingFake(){} +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingFakeMode.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingFakeMode.java new file mode 100755 index 000000000..b7234c212 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingFakeMode.java @@ -0,0 +1,18 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +public enum DestinationPineconeEmbeddingFakeMode { + FAKE("fake"); + + @JsonValue + public final String value; + + private DestinationPineconeEmbeddingFakeMode(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingOpenAI.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingOpenAI.java new file mode 100755 index 000000000..f7adea4a3 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingOpenAI.java @@ -0,0 +1,36 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * DestinationPineconeEmbeddingOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. 
+ */ + +public class DestinationPineconeEmbeddingOpenAI { + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("mode") + public DestinationPineconeEmbeddingOpenAIMode mode; + + public DestinationPineconeEmbeddingOpenAI withMode(DestinationPineconeEmbeddingOpenAIMode mode) { + this.mode = mode; + return this; + } + + @JsonProperty("openai_key") + public String openaiKey; + + public DestinationPineconeEmbeddingOpenAI withOpenaiKey(String openaiKey) { + this.openaiKey = openaiKey; + return this; + } + + public DestinationPineconeEmbeddingOpenAI(@JsonProperty("openai_key") String openaiKey) { + this.openaiKey = openaiKey; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingOpenAIMode.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingOpenAIMode.java new file mode 100755 index 000000000..b2f231681 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeEmbeddingOpenAIMode.java @@ -0,0 +1,18 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +public enum DestinationPineconeEmbeddingOpenAIMode { + OPENAI("openai"); + + @JsonValue + public final String value; + + private DestinationPineconeEmbeddingOpenAIMode(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeIndexing.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeIndexing.java new file mode 100755 index 000000000..8a519f767 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeIndexing.java @@ -0,0 +1,49 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * DestinationPineconeIndexing - Pinecone is a popular vector store that can be used to store and retrieve embeddings. + */ + +public class DestinationPineconeIndexing { + /** + * Pinecone index to use + */ + @JsonProperty("index") + public String index; + + public DestinationPineconeIndexing withIndex(String index) { + this.index = index; + return this; + } + + /** + * Pinecone environment to use + */ + @JsonProperty("pinecone_environment") + public String pineconeEnvironment; + + public DestinationPineconeIndexing withPineconeEnvironment(String pineconeEnvironment) { + this.pineconeEnvironment = pineconeEnvironment; + return this; + } + + @JsonProperty("pinecone_key") + public String pineconeKey; + + public DestinationPineconeIndexing withPineconeKey(String pineconeKey) { + this.pineconeKey = pineconeKey; + return this; + } + + public DestinationPineconeIndexing(@JsonProperty("index") String index, @JsonProperty("pinecone_environment") String pineconeEnvironment, @JsonProperty("pinecone_key") String pineconeKey) { + this.index = index; + this.pineconeEnvironment = pineconeEnvironment; + this.pineconeKey = pineconeKey; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourcePublicApisPublicApis.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconePinecone.java similarity index 66% rename from lib/src/main/java/com/airbyte/api/models/shared/SourcePublicApisPublicApis.java rename to lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconePinecone.java index 95647bf78..35e0ae145 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourcePublicApisPublicApis.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconePinecone.java @@ -6,13 +6,13 @@ import com.fasterxml.jackson.annotation.JsonValue; -public enum SourcePublicApisPublicApis { - PUBLIC_APIS("public-apis"); +public enum 
DestinationPineconePinecone { + PINECONE("pinecone"); @JsonValue public final String value; - private SourcePublicApisPublicApis(String value) { + private DestinationPineconePinecone(String value) { this.value = value; } } diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeProcessingConfigModel.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeProcessingConfigModel.java new file mode 100755 index 000000000..746f05caa --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationPineconeProcessingConfigModel.java @@ -0,0 +1,63 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + + +public class DestinationPineconeProcessingConfigModel { + /** + * Size of overlap between chunks in tokens to store in vector store to better capture relevant context + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("chunk_overlap") + public Long chunkOverlap; + + public DestinationPineconeProcessingConfigModel withChunkOverlap(Long chunkOverlap) { + this.chunkOverlap = chunkOverlap; + return this; + } + + /** + * Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM) + */ + @JsonProperty("chunk_size") + public Long chunkSize; + + public DestinationPineconeProcessingConfigModel withChunkSize(Long chunkSize) { + this.chunkSize = chunkSize; + return this; + } + + /** + * List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. 
`user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("metadata_fields") + public String[] metadataFields; + + public DestinationPineconeProcessingConfigModel withMetadataFields(String[] metadataFields) { + this.metadataFields = metadataFields; + return this; + } + + /** + * List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. 
+ */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("text_fields") + public String[] textFields; + + public DestinationPineconeProcessingConfigModel withTextFields(String[] textFields) { + this.textFields = textFields; + return this; + } + + public DestinationPineconeProcessingConfigModel(@JsonProperty("chunk_size") Long chunkSize) { + this.chunkSize = chunkSize; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationSnowflake.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationSnowflake.java index def6ab81e..5dfe7b6f6 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/DestinationSnowflake.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationSnowflake.java @@ -65,7 +65,7 @@ public DestinationSnowflake withJdbcUrlParams(String jdbcUrlParams) { } /** - * (Beta) The schema to write raw tables into + * The schema to write raw tables into */ @JsonInclude(Include.NON_ABSENT) @JsonProperty("raw_data_schema") @@ -98,18 +98,6 @@ public DestinationSnowflake withSchema(String schema) { return this; } - /** - * (Beta) Use <a href="https://github.com/airbytehq/airbyte/issues/26028" target="_blank">Destinations V2</a>. Contact Airbyte Support to participate in the beta program. 
- */ - @JsonInclude(Include.NON_ABSENT) - @JsonProperty("use_1s1t_format") - public Boolean use1s1tFormat; - - public DestinationSnowflake withUse1s1tFormat(Boolean use1s1tFormat) { - this.use1s1tFormat = use1s1tFormat; - return this; - } - /** * Enter the name of the user you want to use to access the database */ diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationTypesense.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationTypesense.java index f37d56c91..6d81182d6 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/DestinationTypesense.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationTypesense.java @@ -29,9 +29,9 @@ public DestinationTypesense withApiKey(String apiKey) { */ @JsonInclude(Include.NON_ABSENT) @JsonProperty("batch_size") - public String batchSize; + public Long batchSize; - public DestinationTypesense withBatchSize(String batchSize) { + public DestinationTypesense withBatchSize(Long batchSize) { this.batchSize = batchSize; return this; } diff --git a/lib/src/main/java/com/airbyte/api/models/shared/DestinationsResponse.java b/lib/src/main/java/com/airbyte/api/models/shared/DestinationsResponse.java index 9c6bdc886..b9f7807b6 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/DestinationsResponse.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/DestinationsResponse.java @@ -8,9 +8,6 @@ import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; -/** - * DestinationsResponse - Successful operation - */ public class DestinationsResponse { @JsonProperty("data") diff --git a/lib/src/main/java/com/airbyte/api/models/shared/Intercom.java b/lib/src/main/java/com/airbyte/api/models/shared/Intercom.java index 5017c5487..3ad1f1e07 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/Intercom.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/Intercom.java @@ -13,6 +13,9 @@ */ public class Intercom { + /** + * Client 
Id for your Intercom application. + */ @JsonInclude(Include.NON_ABSENT) @JsonProperty("client_id") public String clientId; @@ -22,6 +25,9 @@ public Intercom withClientId(String clientId) { return this; } + /** + * Client Secret for your Intercom application. + */ @JsonInclude(Include.NON_ABSENT) @JsonProperty("client_secret") public String clientSecret; diff --git a/lib/src/main/java/com/airbyte/api/models/shared/JobStatusEnum.java b/lib/src/main/java/com/airbyte/api/models/shared/JobStatusEnum.java index c6223757a..113015eb1 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/JobStatusEnum.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/JobStatusEnum.java @@ -6,9 +6,6 @@ import com.fasterxml.jackson.annotation.JsonValue; -/** - * JobStatusEnum - The Job status you want to filter by - */ public enum JobStatusEnum { PENDING("pending"), RUNNING("running"), diff --git a/lib/src/main/java/com/airbyte/api/models/shared/JobsResponse.java b/lib/src/main/java/com/airbyte/api/models/shared/JobsResponse.java index 31090e82b..8ab2ae76e 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/JobsResponse.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/JobsResponse.java @@ -8,9 +8,6 @@ import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; -/** - * JobsResponse - List all the Jobs by connectionId. - */ public class JobsResponse { @JsonProperty("data") diff --git a/lib/src/main/java/com/airbyte/api/models/shared/LinkedinAdsCredentials.java b/lib/src/main/java/com/airbyte/api/models/shared/LinkedinAdsCredentials.java index e48f64841..8a33c5abd 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/LinkedinAdsCredentials.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/LinkedinAdsCredentials.java @@ -11,7 +11,7 @@ public class LinkedinAdsCredentials { /** - * The client ID of the LinkedIn Ads developer application. + * The client ID of your developer application. 
Refer to our <a href='https://docs.airbyte.com/integrations/sources/linkedin-ads#setup-guide'>documentation</a> for more information. */ @JsonInclude(Include.NON_ABSENT) @JsonProperty("client_id") @@ -23,7 +23,7 @@ public LinkedinAdsCredentials withClientId(String clientId) { } /** - * The client secret the LinkedIn Ads developer application. + * The client secret of your developer application. Refer to our <a href='https://docs.airbyte.com/integrations/sources/linkedin-ads#setup-guide'>documentation</a> for more information. */ @JsonInclude(Include.NON_ABSENT) @JsonProperty("client_secret") diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SchemeBasicAuth.java b/lib/src/main/java/com/airbyte/api/models/shared/SchemeBasicAuth.java new file mode 100755 index 000000000..cc649c374 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SchemeBasicAuth.java @@ -0,0 +1,32 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.airbyte.api.utils.SpeakeasyMetadata; +import com.fasterxml.jackson.annotation.JsonProperty; + + +public class SchemeBasicAuth { + @SpeakeasyMetadata("security:name=password") + public String password; + + public SchemeBasicAuth withPassword(String password) { + this.password = password; + return this; + } + + @SpeakeasyMetadata("security:name=username") + public String username; + + public SchemeBasicAuth withUsername(String username) { + this.username = username; + return this; + } + + public SchemeBasicAuth(@JsonProperty("Username") String username, @JsonProperty("Password") String password) { + this.username = username; + this.password = password; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/Security.java b/lib/src/main/java/com/airbyte/api/models/shared/Security.java index fa58a1c00..a64e501e7 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/Security.java +++ 
b/lib/src/main/java/com/airbyte/api/models/shared/Security.java @@ -5,10 +5,17 @@ package com.airbyte.api.models.shared; import com.airbyte.api.utils.SpeakeasyMetadata; -import com.fasterxml.jackson.annotation.JsonProperty; public class Security { + @SpeakeasyMetadata("security:scheme=true,type=http,subtype=basic") + public SchemeBasicAuth basicAuth; + + public Security withBasicAuth(SchemeBasicAuth basicAuth) { + this.basicAuth = basicAuth; + return this; + } + @SpeakeasyMetadata("security:scheme=true,type=http,subtype=bearer,name=Authorization") public String bearerAuth; @@ -17,7 +24,5 @@ public Security withBearerAuth(String bearerAuth) { return this; } - public Security(@JsonProperty("bearerAuth") String bearerAuth) { - this.bearerAuth = bearerAuth; - } + public Security(){} } diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceAmazonAds.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceAmazonAds.java index e28dc6b48..39cbc357b 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceAmazonAds.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceAmazonAds.java @@ -57,7 +57,19 @@ public SourceAmazonAds withLookBackWindow(Long lookBackWindow) { } /** - * Profile IDs you want to fetch data for. See <a href="https://advertising.amazon.com/API/docs/en-us/concepts/authorization/profiles">docs</a> for more details. + * Marketplace IDs you want to fetch data for. Note: If Profile IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("marketplace_ids") + public String[] marketplaceIds; + + public SourceAmazonAds withMarketplaceIds(String[] marketplaceIds) { + this.marketplaceIds = marketplaceIds; + return this; + } + + /** + * Profile IDs you want to fetch data for. See <a href="https://advertising.amazon.com/API/docs/en-us/concepts/authorization/profiles">docs</a> for more details. 
Note: If Marketplace IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID. */ @JsonInclude(Include.NON_ABSENT) @JsonProperty("profiles") diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceApifyDataset.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceApifyDataset.java index f024a27ef..840c52561 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceApifyDataset.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceApifyDataset.java @@ -28,6 +28,7 @@ public SourceApifyDataset withClean(Boolean clean) { /** * ID of the dataset you would like to load to Airbyte. */ + @JsonInclude(Include.NON_ABSENT) @JsonProperty("datasetId") public String datasetId; @@ -44,8 +45,19 @@ public SourceApifyDataset withSourceType(SourceApifyDatasetApifyDataset sourceTy return this; } - public SourceApifyDataset(@JsonProperty("datasetId") String datasetId, @JsonProperty("sourceType") SourceApifyDatasetApifyDataset sourceType) { - this.datasetId = datasetId; + /** + * Your application's Client Secret. You can find this value on the <a href="https://console.apify.com/account/integrations">console integrations tab</a> after you login. 
+ */ + @JsonProperty("token") + public String token; + + public SourceApifyDataset withToken(String token) { + this.token = token; + return this; + } + + public SourceApifyDataset(@JsonProperty("sourceType") SourceApifyDatasetApifyDataset sourceType, @JsonProperty("token") String token) { this.sourceType = sourceType; + this.token = token; } } diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceAppfollow.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceAppfollow.java index 633066a86..e9dd3730b 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceAppfollow.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceAppfollow.java @@ -4,6 +4,8 @@ package com.airbyte.api.models.shared; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; /** @@ -12,8 +14,9 @@ public class SourceAppfollow { /** - * api secret provided by Appfollow + * API Key provided by Appfollow */ + @JsonInclude(Include.NON_ABSENT) @JsonProperty("api_secret") public String apiSecret; @@ -22,39 +25,6 @@ public SourceAppfollow withApiSecret(String apiSecret) { return this; } - /** - * client id provided by Appfollow - */ - @JsonProperty("cid") - public String cid; - - public SourceAppfollow withCid(String cid) { - this.cid = cid; - return this; - } - - /** - * getting data by Country - */ - @JsonProperty("country") - public String country; - - public SourceAppfollow withCountry(String country) { - this.country = country; - return this; - } - - /** - * for App Store — this is 9-10 digits identification number; for Google Play — this is bundle name; - */ - @JsonProperty("ext_id") - public String extId; - - public SourceAppfollow withExtId(String extId) { - this.extId = extId; - return this; - } - @JsonProperty("sourceType") public SourceAppfollowAppfollow sourceType; @@ -63,11 +33,7 @@ public SourceAppfollow 
withSourceType(SourceAppfollowAppfollow sourceType) { return this; } - public SourceAppfollow(@JsonProperty("api_secret") String apiSecret, @JsonProperty("cid") String cid, @JsonProperty("country") String country, @JsonProperty("ext_id") String extId, @JsonProperty("sourceType") SourceAppfollowAppfollow sourceType) { - this.apiSecret = apiSecret; - this.cid = cid; - this.country = country; - this.extId = extId; + public SourceAppfollow(@JsonProperty("sourceType") SourceAppfollowAppfollow sourceType) { this.sourceType = sourceType; } } diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceAuth0.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceAuth0.java index c054a4484..bb3057164 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceAuth0.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceAuth0.java @@ -4,6 +4,8 @@ package com.airbyte.api.models.shared; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; /** @@ -38,6 +40,18 @@ public SourceAuth0 withSourceType(SourceAuth0Auth0 sourceType) { return this; } + /** + * UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. 
+ */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("start_date") + public String startDate; + + public SourceAuth0 withStartDate(String startDate) { + this.startDate = startDate; + return this; + } + public SourceAuth0(@JsonProperty("base_url") String baseUrl, @JsonProperty("credentials") Object credentials, @JsonProperty("sourceType") SourceAuth0Auth0 sourceType) { this.baseUrl = baseUrl; this.credentials = credentials; diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceDatadog.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceDatadog.java deleted file mode 100755 index b5477f5f7..000000000 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceDatadog.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - */ - -package com.airbyte.api.models.shared; - -import com.fasterxml.jackson.annotation.JsonInclude.Include; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; - -/** - * SourceDatadog - The values required to configure the source. - */ - -public class SourceDatadog { - /** - * Datadog API key - */ - @JsonProperty("api_key") - public String apiKey; - - public SourceDatadog withApiKey(String apiKey) { - this.apiKey = apiKey; - return this; - } - - /** - * Datadog application key - */ - @JsonProperty("application_key") - public String applicationKey; - - public SourceDatadog withApplicationKey(String applicationKey) { - this.applicationKey = applicationKey; - return this; - } - - /** - * UTC date and time in the format 2017-01-25T00:00:00Z. Data after this date will not be replicated. An empty value will represent the current datetime for each execution. This just applies to Incremental syncs. 
- */ - @JsonInclude(Include.NON_ABSENT) - @JsonProperty("end_date") - public String endDate; - - public SourceDatadog withEndDate(String endDate) { - this.endDate = endDate; - return this; - } - - /** - * Maximum number of records to collect per request. - */ - @JsonInclude(Include.NON_ABSENT) - @JsonProperty("max_records_per_request") - public Long maxRecordsPerRequest; - - public SourceDatadog withMaxRecordsPerRequest(Long maxRecordsPerRequest) { - this.maxRecordsPerRequest = maxRecordsPerRequest; - return this; - } - - /** - * List of queries to be run and used as inputs. - */ - @JsonInclude(Include.NON_ABSENT) - @JsonProperty("queries") - public SourceDatadogQueries[] queries; - - public SourceDatadog withQueries(SourceDatadogQueries[] queries) { - this.queries = queries; - return this; - } - - /** - * The search query. This just applies to Incremental syncs. If empty, it'll collect all logs. - */ - @JsonInclude(Include.NON_ABSENT) - @JsonProperty("query") - public String query; - - public SourceDatadog withQuery(String query) { - this.query = query; - return this; - } - - /** - * The site where Datadog data resides in. - */ - @JsonInclude(Include.NON_ABSENT) - @JsonProperty("site") - public SourceDatadogSite site; - - public SourceDatadog withSite(SourceDatadogSite site) { - this.site = site; - return this; - } - - @JsonProperty("sourceType") - public SourceDatadogDatadog sourceType; - - public SourceDatadog withSourceType(SourceDatadogDatadog sourceType) { - this.sourceType = sourceType; - return this; - } - - /** - * UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. This just applies to Incremental syncs. 
- */ - @JsonInclude(Include.NON_ABSENT) - @JsonProperty("start_date") - public String startDate; - - public SourceDatadog withStartDate(String startDate) { - this.startDate = startDate; - return this; - } - - public SourceDatadog(@JsonProperty("api_key") String apiKey, @JsonProperty("application_key") String applicationKey, @JsonProperty("sourceType") SourceDatadogDatadog sourceType) { - this.apiKey = apiKey; - this.applicationKey = applicationKey; - this.sourceType = sourceType; - } -} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceDatadogQueries.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceDatadogQueries.java deleted file mode 100755 index 867eea160..000000000 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceDatadogQueries.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - */ - -package com.airbyte.api.models.shared; - -import com.fasterxml.jackson.annotation.JsonProperty; - - -public class SourceDatadogQueries { - /** - * A data source that is powered by the platform. - */ - @JsonProperty("data_source") - public SourceDatadogQueriesDataSource dataSource; - - public SourceDatadogQueries withDataSource(SourceDatadogQueriesDataSource dataSource) { - this.dataSource = dataSource; - return this; - } - - /** - * The variable name for use in queries. - */ - @JsonProperty("name") - public String name; - - public SourceDatadogQueries withName(String name) { - this.name = name; - return this; - } - - /** - * A classic query string. 
- */ - @JsonProperty("query") - public String query; - - public SourceDatadogQueries withQuery(String query) { - this.query = query; - return this; - } - - public SourceDatadogQueries(@JsonProperty("data_source") SourceDatadogQueriesDataSource dataSource, @JsonProperty("name") String name, @JsonProperty("query") String query) { - this.dataSource = dataSource; - this.name = name; - this.query = query; - } -} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceDatadogQueriesDataSource.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceDatadogQueriesDataSource.java deleted file mode 100755 index ecd0a3c67..000000000 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceDatadogQueriesDataSource.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - */ - -package com.airbyte.api.models.shared; - -import com.fasterxml.jackson.annotation.JsonValue; - -/** - * SourceDatadogQueriesDataSource - A data source that is powered by the platform. - */ -public enum SourceDatadogQueriesDataSource { - METRICS("metrics"), - CLOUD_COST("cloud_cost"), - LOGS("logs"), - RUM("rum"); - - @JsonValue - public final String value; - - private SourceDatadogQueriesDataSource(String value) { - this.value = value; - } -} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceDatadogSite.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceDatadogSite.java deleted file mode 100755 index e07e53249..000000000 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceDatadogSite.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - */ - -package com.airbyte.api.models.shared; - -import com.fasterxml.jackson.annotation.JsonValue; - -/** - * SourceDatadogSite - The site where Datadog data resides in. 
- */ -public enum SourceDatadogSite { - DATADOGHQ_COM("datadoghq.com"), - US3_DATADOGHQ_COM("us3.datadoghq.com"), - US5_DATADOGHQ_COM("us5.datadoghq.com"), - DATADOGHQ_EU("datadoghq.eu"), - DDOG_GOV_COM("ddog-gov.com"); - - @JsonValue - public final String value; - - private SourceDatadogSite(String value) { - this.value = value; - } -} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleAds.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleAds.java index d8a8bf8c0..2895f574b 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleAds.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleAds.java @@ -56,7 +56,7 @@ public SourceGoogleAds withCustomerId(String customerId) { } /** - * UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. + * UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. (Default value of today is used if not set) */ @JsonInclude(Include.NON_ABSENT) @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd") @@ -89,8 +89,9 @@ public SourceGoogleAds withSourceType(SourceGoogleAdsGoogleAds sourceType) { } /** - * UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. + * UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. 
(Default value of two years ago is used if not set) */ + @JsonInclude(Include.NON_ABSENT) @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd") @JsonProperty("start_date") public LocalDate startDate; @@ -100,10 +101,9 @@ public SourceGoogleAds withStartDate(LocalDate startDate) { return this; } - public SourceGoogleAds(@JsonProperty("credentials") SourceGoogleAdsGoogleCredentials credentials, @JsonProperty("customer_id") String customerId, @JsonProperty("sourceType") SourceGoogleAdsGoogleAds sourceType, @JsonProperty("start_date") LocalDate startDate) { + public SourceGoogleAds(@JsonProperty("credentials") SourceGoogleAdsGoogleCredentials credentials, @JsonProperty("customer_id") String customerId, @JsonProperty("sourceType") SourceGoogleAdsGoogleAds sourceType) { this.credentials = credentials; this.customerId = customerId; this.sourceType = sourceType; - this.startDate = startDate; } } diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsole.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsole.java index 8d6180267..d8765e4c9 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsole.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsole.java @@ -24,7 +24,7 @@ public SourceGoogleSearchConsole withAuthorization(Object authorization) { } /** - * A JSON array describing the custom reports you want to sync from Google Search Console. See <a href="https://docs.airbyte.com/integrations/sources/google-search-console#step-2-set-up-the-google-search-console-connector-in-airbyte">the docs</a> for more information about the exact format you can use to fill out this field. + * (DEPRECATED) A JSON array describing the custom reports you want to sync from Google Search Console. See our <a href='https://docs.airbyte.com/integrations/sources/google-search-console'>documentation</a> for more information on formulating custom reports. 
*/ @JsonInclude(Include.NON_ABSENT) @JsonProperty("custom_reports") @@ -36,19 +36,31 @@ public SourceGoogleSearchConsole withCustomReports(String customReports) { } /** - * If "final" or if this parameter is omitted, the returned data will include only finalized data. Setting this parameter to "all" should not be used with Incremental Sync mode as it may cause data loss. If "all", data will include fresh data. + * You can add your Custom Analytics report by creating one. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("custom_reports_array") + public SourceGoogleSearchConsoleCustomReportConfig[] customReportsArray; + + public SourceGoogleSearchConsole withCustomReportsArray(SourceGoogleSearchConsoleCustomReportConfig[] customReportsArray) { + this.customReportsArray = customReportsArray; + return this; + } + + /** + * If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our <a href='https://docs.airbyte.com/integrations/source/google-search-console'>full documentation</a>. */ @JsonInclude(Include.NON_ABSENT) @JsonProperty("data_state") - public SourceGoogleSearchConsoleDataState dataState; + public SourceGoogleSearchConsoleDataFreshness dataState; - public SourceGoogleSearchConsole withDataState(SourceGoogleSearchConsoleDataState dataState) { + public SourceGoogleSearchConsole withDataState(SourceGoogleSearchConsoleDataFreshness dataState) { this.dataState = dataState; return this; } /** - * UTC date in the format 2017-01-25. Any data after this date will not be replicated. Must be greater or equal to the start date field. + * UTC date in the format YYYY-MM-DD. Any data created after this date will not be replicated. Must be greater or equal to the start date field. Leaving this field blank will replicate all data from the start date onward. 
*/ @JsonInclude(Include.NON_ABSENT) @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd") @@ -61,7 +73,7 @@ public SourceGoogleSearchConsole withEndDate(LocalDate endDate) { } /** - * The URLs of the website property attached to your GSC account. Read more <a href="https://support.google.com/webmasters/answer/34592?hl=en">here</a>. + * The URLs of the website property attached to your GSC account. Learn more about properties <a href="https://support.google.com/webmasters/answer/34592?hl=en">here</a>. */ @JsonProperty("site_urls") public String[] siteUrls; @@ -80,8 +92,9 @@ public SourceGoogleSearchConsole withSourceType(SourceGoogleSearchConsoleGoogleS } /** - * UTC date in the format 2017-01-25. Any data before this date will not be replicated. + * UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. */ + @JsonInclude(Include.NON_ABSENT) @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd") @JsonProperty("start_date") public LocalDate startDate; @@ -91,10 +104,9 @@ public SourceGoogleSearchConsole withStartDate(LocalDate startDate) { return this; } - public SourceGoogleSearchConsole(@JsonProperty("authorization") Object authorization, @JsonProperty("site_urls") String[] siteUrls, @JsonProperty("sourceType") SourceGoogleSearchConsoleGoogleSearchConsole sourceType, @JsonProperty("start_date") LocalDate startDate) { + public SourceGoogleSearchConsole(@JsonProperty("authorization") Object authorization, @JsonProperty("site_urls") String[] siteUrls, @JsonProperty("sourceType") SourceGoogleSearchConsoleGoogleSearchConsole sourceType) { this.authorization = authorization; this.siteUrls = siteUrls; this.sourceType = sourceType; - this.startDate = startDate; } } diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleCustomReportConfig.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleCustomReportConfig.java new file mode 100755 index 
000000000..af0c929ce --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleCustomReportConfig.java @@ -0,0 +1,37 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonProperty; + + +public class SourceGoogleSearchConsoleCustomReportConfig { + /** + * A list of dimensions (country, date, device, page, query) + */ + @JsonProperty("dimensions") + public SourceGoogleSearchConsoleCustomReportConfigValidEnums[] dimensions; + + public SourceGoogleSearchConsoleCustomReportConfig withDimensions(SourceGoogleSearchConsoleCustomReportConfigValidEnums[] dimensions) { + this.dimensions = dimensions; + return this; + } + + /** + * The name of the custom report, this name would be used as stream name + */ + @JsonProperty("name") + public String name; + + public SourceGoogleSearchConsoleCustomReportConfig withName(String name) { + this.name = name; + return this; + } + + public SourceGoogleSearchConsoleCustomReportConfig(@JsonProperty("dimensions") SourceGoogleSearchConsoleCustomReportConfigValidEnums[] dimensions, @JsonProperty("name") String name) { + this.dimensions = dimensions; + this.name = name; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleCustomReportConfigValidEnums.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleCustomReportConfigValidEnums.java new file mode 100755 index 000000000..fd47c4aa8 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleCustomReportConfigValidEnums.java @@ -0,0 +1,25 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * SourceGoogleSearchConsoleCustomReportConfigValidEnums - An enumeration of dimensions. 
+ */ +public enum SourceGoogleSearchConsoleCustomReportConfigValidEnums { + COUNTRY("country"), + DATE("date"), + DEVICE("device"), + PAGE("page"), + QUERY("query"); + + @JsonValue + public final String value; + + private SourceGoogleSearchConsoleCustomReportConfigValidEnums(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleDataFreshness.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleDataFreshness.java new file mode 100755 index 000000000..55fbbb9a5 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleDataFreshness.java @@ -0,0 +1,22 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * SourceGoogleSearchConsoleDataFreshness - If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our <a href='https://docs.airbyte.com/integrations/sources/google-search-console'>full documentation</a>. + */ +public enum SourceGoogleSearchConsoleDataFreshness { + FINAL_("final"), + ALL("all"); + + @JsonValue + public final String value; + + private SourceGoogleSearchConsoleDataFreshness(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleDataState.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleDataState.java deleted file mode 100755 index a2bfafced..000000000 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSearchConsoleDataState.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
- */ - -package com.airbyte.api.models.shared; - -import com.fasterxml.jackson.annotation.JsonValue; - -/** - * SourceGoogleSearchConsoleDataState - If "final" or if this parameter is omitted, the returned data will include only finalized data. Setting this parameter to "all" should not be used with Incremental Sync mode as it may cause data loss. If "all", data will include fresh data. - */ -public enum SourceGoogleSearchConsoleDataState { - FINAL_("final"), - ALL("all"); - - @JsonValue - public final String value; - - private SourceGoogleSearchConsoleDataState(String value) { - this.value = value; - } -} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSheets.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSheets.java index 75034df7f..3d1c1bf25 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSheets.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceGoogleSheets.java @@ -36,18 +36,6 @@ public SourceGoogleSheets withNamesConversion(Boolean namesConversion) { return this; } - /** - * The number of rows fetched when making a Google Sheet API call. Defaults to 200. 
- */ - @JsonInclude(Include.NON_ABSENT) - @JsonProperty("row_batch_size") - public Long rowBatchSize; - - public SourceGoogleSheets withRowBatchSize(Long rowBatchSize) { - this.rowBatchSize = rowBatchSize; - return this; - } - @JsonProperty("sourceType") public SourceGoogleSheetsGoogleSheets sourceType; diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceIntercom.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceIntercom.java index 0bffaec69..1e24a1f35 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceIntercom.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceIntercom.java @@ -6,6 +6,8 @@ import com.airbyte.api.utils.DateTimeDeserializer; import com.airbyte.api.utils.DateTimeSerializer; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; @@ -27,6 +29,30 @@ public SourceIntercom withAccessToken(String accessToken) { return this; } + /** + * Client Id for your Intercom application. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("client_id") + public String clientId; + + public SourceIntercom withClientId(String clientId) { + this.clientId = clientId; + return this; + } + + /** + * Client Secret for your Intercom application. 
+ */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("client_secret") + public String clientSecret; + + public SourceIntercom withClientSecret(String clientSecret) { + this.clientSecret = clientSecret; + return this; + } + @JsonProperty("sourceType") public SourceIntercomIntercom sourceType; diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceLemlist.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceLemlist.java index 268352a61..0395d47e1 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceLemlist.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceLemlist.java @@ -12,7 +12,7 @@ public class SourceLemlist { /** - * Lemlist API key. + * Lemlist API key. */ @JsonProperty("api_key") public String apiKey; diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAds.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAds.java index ce21e705e..65b60f634 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAds.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAds.java @@ -16,7 +16,7 @@ public class SourceLinkedinAds { /** - * Specify the account IDs separated by a space, to pull the data from. Leave empty, if you want to pull the data from all associated accounts. See the <a href="https://www.linkedin.com/help/linkedin/answer/a424270/find-linkedin-ads-account-details?lang=en">LinkedIn Ads docs</a> for more info. + * Specify the account IDs to pull data from, separated by a space. Leave this field empty if you want to pull the data from all accounts accessible by the authenticated user. See the <a href="https://www.linkedin.com/help/linkedin/answer/a424270/find-linkedin-ads-account-details?lang=en">LinkedIn docs</a> to locate these IDs.
*/ @JsonInclude(Include.NON_ABSENT) @JsonProperty("account_ids") @@ -54,7 +54,7 @@ public SourceLinkedinAds withSourceType(SourceLinkedinAdsLinkedinAds sourceType) } /** - * UTC date in the format 2020-09-17. Any data before this date will not be replicated. + * UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. */ @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd") @JsonProperty("start_date") diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsAdAnalyticsReportConfiguration.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsAdAnalyticsReportConfiguration.java index 8b435c25a..05a070510 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsAdAnalyticsReportConfiguration.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsAdAnalyticsReportConfiguration.java @@ -12,7 +12,7 @@ public class SourceLinkedinAdsAdAnalyticsReportConfiguration { /** - * The name for the report + * The name for the custom report. */ @JsonProperty("name") public String name; @@ -23,22 +23,18 @@ public SourceLinkedinAdsAdAnalyticsReportConfiguration withName(String name) { } /** - * Select value from list to pivot by + * Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives. 
*/ @JsonProperty("pivot_by") - public SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy pivotBy; + public SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory pivotBy; - public SourceLinkedinAdsAdAnalyticsReportConfiguration withPivotBy(SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy pivotBy) { + public SourceLinkedinAdsAdAnalyticsReportConfiguration withPivotBy(SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory pivotBy) { this.pivotBy = pivotBy; return this; } /** - * Set time granularity for report: - * ALL - Results grouped into a single result across the entire time range of the report. - * DAILY - Results grouped by day. - * MONTHLY - Results grouped by month. - * YEARLY - Results grouped by year. + * Choose how to group the data in your report by time. The options are:<br>- 'ALL': A single result summarizing the entire time range.<br>- 'DAILY': Group results by each day.<br>- 'MONTHLY': Group results by each month.<br>- 'YEARLY': Group results by each year.<br>Selecting a time grouping helps you analyze trends and patterns over different time periods. 
*/ @JsonProperty("time_granularity") public SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity timeGranularity; @@ -48,7 +44,7 @@ public SourceLinkedinAdsAdAnalyticsReportConfiguration withTimeGranularity(Sourc return this; } - public SourceLinkedinAdsAdAnalyticsReportConfiguration(@JsonProperty("name") String name, @JsonProperty("pivot_by") SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy pivotBy, @JsonProperty("time_granularity") SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity timeGranularity) { + public SourceLinkedinAdsAdAnalyticsReportConfiguration(@JsonProperty("name") String name, @JsonProperty("pivot_by") SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory pivotBy, @JsonProperty("time_granularity") SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity timeGranularity) { this.name = name; this.pivotBy = pivotBy; this.timeGranularity = timeGranularity; diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory.java similarity index 79% rename from lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy.java rename to lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory.java index d8518d90e..04fafd4d4 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory.java @@ -7,9 +7,9 @@ import com.fasterxml.jackson.annotation.JsonValue; /** - * SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy - Select value from list to pivot by + * SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory - Choose a category to pivot your analytics report around. 
This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives. */ -public enum SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy { +public enum SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory { COMPANY("COMPANY"), ACCOUNT("ACCOUNT"), SHARE("SHARE"), @@ -35,7 +35,7 @@ public enum SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy { @JsonValue public final String value; - private SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy(String value) { + private SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory(String value) { this.value = value; } } diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity.java index 14cba5bbe..5da650cee 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity.java @@ -7,11 +7,7 @@ import com.fasterxml.jackson.annotation.JsonValue; /** - * SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity - Set time granularity for report: - * ALL - Results grouped into a single result across the entire time range of the report. - * DAILY - Results grouped by day. - * MONTHLY - Results grouped by month. - * YEARLY - Results grouped by year. + * SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity - Choose how to group the data in your report by time. The options are:<br>- 'ALL': A single result summarizing the entire time range.<br>- 'DAILY': Group results by each day.<br>- 'MONTHLY': Group results by each month.<br>- 'YEARLY': Group results by each year.<br>Selecting a time grouping helps you analyze trends and patterns over different time periods. 
*/ public enum SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity { ALL("ALL"), diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsCredentialsAccessToken.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsCredentialsAccessToken.java index de1944dde..99eb0741b 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsCredentialsAccessToken.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsCredentialsAccessToken.java @@ -11,7 +11,7 @@ public class SourceLinkedinAdsCredentialsAccessToken { /** - * The token value generated using the authentication code. See the <a href="https://docs.airbyte.com/integrations/sources/linkedin-ads#authentication">docs</a> to obtain yours. + * The access token generated for your developer application. Refer to our <a href='https://docs.airbyte.com/integrations/sources/linkedin-ads#setup-guide'>documentation</a> for more information. */ @JsonProperty("access_token") public String accessToken; diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsCredentialsOAuth20.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsCredentialsOAuth20.java index 47c54bf3f..d8fbbb57f 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsCredentialsOAuth20.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceLinkedinAdsCredentialsOAuth20.java @@ -20,7 +20,7 @@ public SourceLinkedinAdsCredentialsOAuth20 withAuthMethod(SourceLinkedinAdsCrede } /** - * The client ID of the LinkedIn Ads developer application. + * The client ID of your developer application. Refer to our <a href='https://docs.airbyte.com/integrations/sources/linkedin-ads#setup-guide'>documentation</a> for more information. 
*/ @JsonProperty("client_id") public String clientId; @@ -31,7 +31,7 @@ public SourceLinkedinAdsCredentialsOAuth20 withClientId(String clientId) { } /** - * The client secret the LinkedIn Ads developer application. + * The client secret of your developer application. Refer to our <a href='https://docs.airbyte.com/integrations/sources/linkedin-ads#setup-guide'>documentation</a> for more information. */ @JsonProperty("client_secret") public String clientSecret; @@ -42,7 +42,7 @@ public SourceLinkedinAdsCredentialsOAuth20 withClientSecret(String clientSecret) } /** - * The key to refresh the expired access token. + * The key to refresh the expired access token. Refer to our <a href='https://docs.airbyte.com/integrations/sources/linkedin-ads#setup-guide'>documentation</a> for more information. */ @JsonProperty("refresh_token") public String refreshToken; diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceMssql.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceMssql.java index bf0853b6f..89b36463c 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceMssql.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceMssql.java @@ -71,7 +71,7 @@ public SourceMssql withPort(Long port) { } /** - * The replication method used for extracting data from the database. STANDARD replication requires no setup on the DB side but will not be able to represent deletions incrementally. CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself. + * Configures how data is extracted from the database. 
*/ @JsonInclude(Include.NON_ABSENT) @JsonProperty("replication_method") diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync.java deleted file mode 100755 index 1b115831b..000000000 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - */ - -package com.airbyte.api.models.shared; - -import com.fasterxml.jackson.annotation.JsonValue; - -/** - * SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync - What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC. - */ -public enum SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync { - EXISTING_AND_NEW("Existing and New"), - NEW_CHANGES_ONLY("New Changes Only"); - - @JsonValue - public final String value; - - private SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync(String value) { - this.value = value; - } -} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel.java deleted file mode 100755 index ff5211cb5..000000000 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
- */ - -package com.airbyte.api.models.shared; - -import com.fasterxml.jackson.annotation.JsonValue; - -/** - * SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel - Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the <a href="https://docs.microsoft.com/en-us/dotnet/framework/data/adonet/sql/snapshot-isolation-in-sql-server">snapshot isolation mode</a> on the database. - */ -public enum SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel { - SNAPSHOT("Snapshot"), - READ_COMMITTED("Read Committed"); - - @JsonValue - public final String value; - - private SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel(String value) { - this.value = value; - } -} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDC.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC.java similarity index 54% rename from lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDC.java rename to lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC.java index 9d00efd63..803cc4489 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDC.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC.java @@ -9,18 +9,18 @@ import com.fasterxml.jackson.annotation.JsonProperty; /** - * SourceMssqlReplicationMethodLogicalReplicationCDC - CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself. 
+ * SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC - <i>Recommended</i> - Incrementally reads new inserts, updates, and deletes using the SQL Server's <a href="https://docs.airbyte.com/integrations/sources/mssql/#change-data-capture-cdc">change data capture feature</a>. This must be enabled on your database. */ -public class SourceMssqlReplicationMethodLogicalReplicationCDC { +public class SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC { /** * What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC. */ @JsonInclude(Include.NON_ABSENT) @JsonProperty("data_to_sync") - public SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync dataToSync; + public SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync dataToSync; - public SourceMssqlReplicationMethodLogicalReplicationCDC withDataToSync(SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync dataToSync) { + public SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC withDataToSync(SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync dataToSync) { this.dataToSync = dataToSync; return this; } @@ -32,15 +32,15 @@ public SourceMssqlReplicationMethodLogicalReplicationCDC withDataToSync(SourceMs @JsonProperty("initial_waiting_seconds") public Long initialWaitingSeconds; - public SourceMssqlReplicationMethodLogicalReplicationCDC withInitialWaitingSeconds(Long initialWaitingSeconds) { + public SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC withInitialWaitingSeconds(Long initialWaitingSeconds) { this.initialWaitingSeconds = initialWaitingSeconds; return this; } @JsonProperty("method") - public SourceMssqlReplicationMethodLogicalReplicationCDCMethod method; + public SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod method; - 
public SourceMssqlReplicationMethodLogicalReplicationCDC withMethod(SourceMssqlReplicationMethodLogicalReplicationCDCMethod method) { + public SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC withMethod(SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod method) { this.method = method; return this; } @@ -50,14 +50,14 @@ public SourceMssqlReplicationMethodLogicalReplicationCDC withMethod(SourceMssqlR */ @JsonInclude(Include.NON_ABSENT) @JsonProperty("snapshot_isolation") - public SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel snapshotIsolation; + public SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel snapshotIsolation; - public SourceMssqlReplicationMethodLogicalReplicationCDC withSnapshotIsolation(SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel snapshotIsolation) { + public SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC withSnapshotIsolation(SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel snapshotIsolation) { this.snapshotIsolation = snapshotIsolation; return this; } - public SourceMssqlReplicationMethodLogicalReplicationCDC(@JsonProperty("method") SourceMssqlReplicationMethodLogicalReplicationCDCMethod method) { + public SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDC(@JsonProperty("method") SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod method) { this.method = method; } } diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync.java new file mode 100755 index 000000000..7e4488886 --- /dev/null +++ 
b/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync.java @@ -0,0 +1,22 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync - What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC. + */ +public enum SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync { + EXISTING_AND_NEW("Existing and New"), + NEW_CHANGES_ONLY("New Changes Only"); + + @JsonValue + public final String value; + + private SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCDataToSync(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel.java new file mode 100755 index 000000000..729e7c30d --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel.java @@ -0,0 +1,22 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel - Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. 
If you choose the "Snapshot" level, you must enable the <a href="https://docs.microsoft.com/en-us/dotnet/framework/data/adonet/sql/snapshot-isolation-in-sql-server">snapshot isolation mode</a> on the database. + */ +public enum SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel { + SNAPSHOT("Snapshot"), + READ_COMMITTED("Read Committed"); + + @JsonValue + public final String value; + + private SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod.java new file mode 100755 index 000000000..2fdf0032c --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod.java @@ -0,0 +1,18 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +public enum SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod { + CDC("CDC"); + + @JsonValue + public final String value; + + private SourceMssqlReplicationMethodReadChangesUsingChangeDataCaptureCDCMethod(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursor.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursor.java new file mode 100755 index 000000000..0322cabab --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursor.java @@ -0,0 +1,25 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * SourceMssqlReplicationMethodScanChangesWithUserDefinedCursor - Incrementally detects new inserts and updates using the <a href="https://docs.airbyte.com/understanding-airbyte/connections/incremental-append/#user-defined-cursor">cursor column</a> chosen when configuring a connection (e.g. created_at, updated_at). 
+ */ + +public class SourceMssqlReplicationMethodScanChangesWithUserDefinedCursor { + @JsonProperty("method") + public SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod method; + + public SourceMssqlReplicationMethodScanChangesWithUserDefinedCursor withMethod(SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod method) { + this.method = method; + return this; + } + + public SourceMssqlReplicationMethodScanChangesWithUserDefinedCursor(@JsonProperty("method") SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod method) { + this.method = method; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod.java new file mode 100755 index 000000000..1ae9f6894 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod.java @@ -0,0 +1,18 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +public enum SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod { + STANDARD("STANDARD"); + + @JsonValue + public final String value; + + private SourceMssqlReplicationMethodScanChangesWithUserDefinedCursorMethod(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodStandard.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodStandard.java deleted file mode 100755 index 9305bfbe7..000000000 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodStandard.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
- */ - -package com.airbyte.api.models.shared; - -import com.fasterxml.jackson.annotation.JsonProperty; - -/** - * SourceMssqlReplicationMethodStandard - Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. - */ - -public class SourceMssqlReplicationMethodStandard { - @JsonProperty("method") - public SourceMssqlReplicationMethodStandardMethod method; - - public SourceMssqlReplicationMethodStandard withMethod(SourceMssqlReplicationMethodStandardMethod method) { - this.method = method; - return this; - } - - public SourceMssqlReplicationMethodStandard(@JsonProperty("method") SourceMssqlReplicationMethodStandardMethod method) { - this.method = method; - } -} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceOpenweather.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceOpenweather.java deleted file mode 100755 index 24b99e8d7..000000000 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceOpenweather.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - */ - -package com.airbyte.api.models.shared; - -import com.fasterxml.jackson.annotation.JsonInclude.Include; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; - -/** - * SourceOpenweather - The values required to configure the source. - */ - -public class SourceOpenweather { - /** - * Your OpenWeather API Key. See <a href="https://openweathermap.org/api">here</a>. The key is case sensitive. - */ - @JsonProperty("appid") - public String appid; - - public SourceOpenweather withAppid(String appid) { - this.appid = appid; - return this; - } - - /** - * You can use lang parameter to get the output in your language. The contents of the description field will be translated. See <a href="https://openweathermap.org/api/one-call-api#multi">here</a> for the list of supported languages. 
- */ - @JsonInclude(Include.NON_ABSENT) - @JsonProperty("lang") - public SourceOpenweatherLanguage lang; - - public SourceOpenweather withLang(SourceOpenweatherLanguage lang) { - this.lang = lang; - return this; - } - - /** - * Latitude for which you want to get weather condition from. (min -90, max 90) - */ - @JsonProperty("lat") - public String lat; - - public SourceOpenweather withLat(String lat) { - this.lat = lat; - return this; - } - - /** - * Longitude for which you want to get weather condition from. (min -180, max 180) - */ - @JsonProperty("lon") - public String lon; - - public SourceOpenweather withLon(String lon) { - this.lon = lon; - return this; - } - - @JsonProperty("sourceType") - public SourceOpenweatherOpenweather sourceType; - - public SourceOpenweather withSourceType(SourceOpenweatherOpenweather sourceType) { - this.sourceType = sourceType; - return this; - } - - /** - * Units of measurement. standard, metric and imperial units are available. If you do not use the units parameter, standard units will be applied by default. - */ - @JsonInclude(Include.NON_ABSENT) - @JsonProperty("units") - public SourceOpenweatherUnits units; - - public SourceOpenweather withUnits(SourceOpenweatherUnits units) { - this.units = units; - return this; - } - - public SourceOpenweather(@JsonProperty("appid") String appid, @JsonProperty("lat") String lat, @JsonProperty("lon") String lon, @JsonProperty("sourceType") SourceOpenweatherOpenweather sourceType) { - this.appid = appid; - this.lat = lat; - this.lon = lon; - this.sourceType = sourceType; - } -} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceOpenweatherLanguage.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceOpenweatherLanguage.java deleted file mode 100755 index 6d7baace0..000000000 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceOpenweatherLanguage.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
- */ - -package com.airbyte.api.models.shared; - -import com.fasterxml.jackson.annotation.JsonValue; - -/** - * SourceOpenweatherLanguage - You can use lang parameter to get the output in your language. The contents of the description field will be translated. See <a href="https://openweathermap.org/api/one-call-api#multi">here</a> for the list of supported languages. - */ -public enum SourceOpenweatherLanguage { - AF("af"), - AL("al"), - AR("ar"), - AZ("az"), - BG("bg"), - CA("ca"), - CZ("cz"), - DA("da"), - DE("de"), - EL("el"), - EN("en"), - EU("eu"), - FA("fa"), - FI("fi"), - FR("fr"), - GL("gl"), - HE("he"), - HI("hi"), - HR("hr"), - HU("hu"), - ID("id"), - IT("it"), - JA("ja"), - KR("kr"), - LA("la"), - LT("lt"), - MK("mk"), - NO("no"), - NL("nl"), - PL("pl"), - PT("pt"), - PT_BR("pt_br"), - RO("ro"), - RU("ru"), - SV("sv"), - SE("se"), - SK("sk"), - SL("sl"), - SP("sp"), - ES("es"), - SR("sr"), - TH("th"), - TR("tr"), - UA("ua"), - UK("uk"), - VI("vi"), - ZH_CN("zh_cn"), - ZH_TW("zh_tw"), - ZU("zu"); - - @JsonValue - public final String value; - - private SourceOpenweatherLanguage(String value) { - this.value = value; - } -} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceOpenweatherUnits.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceOpenweatherUnits.java deleted file mode 100755 index f21e00571..000000000 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceOpenweatherUnits.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - */ - -package com.airbyte.api.models.shared; - -import com.fasterxml.jackson.annotation.JsonValue; - -/** - * SourceOpenweatherUnits - Units of measurement. standard, metric and imperial units are available. If you do not use the units parameter, standard units will be applied by default. 
- */ -public enum SourceOpenweatherUnits { - STANDARD("standard"), - METRIC("metric"), - IMPERIAL("imperial"); - - @JsonValue - public final String value; - - private SourceOpenweatherUnits(String value) { - this.value = value; - } -} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgres.java b/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgres.java index 8f172a1c4..1804ddf9e 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgres.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgres.java @@ -71,7 +71,7 @@ public SourcePostgres withPort(Long port) { } /** - * Replication method for extracting data from the database. + * Configures how data is extracted from the database. */ @JsonInclude(Include.NON_ABSENT) @JsonProperty("replication_method") diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumn.java b/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumn.java new file mode 100755 index 000000000..ca2592c17 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumn.java @@ -0,0 +1,25 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * SourcePostgresReplicationMethodDetectChangesWithXminSystemColumn - <i>Recommended</i> - Incrementally reads new inserts and updates via Postgres <a href="https://docs.airbyte.com/integrations/sources/postgres/#xmin">Xmin system column</a>. Only recommended for tables up to 500GB. 
+ */ + +public class SourcePostgresReplicationMethodDetectChangesWithXminSystemColumn { + @JsonProperty("method") + public SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod method; + + public SourcePostgresReplicationMethodDetectChangesWithXminSystemColumn withMethod(SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod method) { + this.method = method; + return this; + } + + public SourcePostgresReplicationMethodDetectChangesWithXminSystemColumn(@JsonProperty("method") SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod method) { + this.method = method; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod.java b/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod.java new file mode 100755 index 000000000..884f156b3 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod.java @@ -0,0 +1,18 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +public enum SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod { + XMIN("Xmin"); + + @JsonValue + public final String value; + + private SourcePostgresReplicationMethodDetectChangesWithXminSystemColumnMethod(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursor.java b/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursor.java new file mode 100755 index 000000000..37f2023b0 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursor.java @@ -0,0 +1,25 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * SourcePostgresReplicationMethodScanChangesWithUserDefinedCursor - Incrementally detects new inserts and updates using the <a href="https://docs.airbyte.com/understanding-airbyte/connections/incremental-append/#user-defined-cursor">cursor column</a> chosen when configuring a connection (e.g. created_at, updated_at). 
+ */ + +public class SourcePostgresReplicationMethodScanChangesWithUserDefinedCursor { + @JsonProperty("method") + public SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod method; + + public SourcePostgresReplicationMethodScanChangesWithUserDefinedCursor withMethod(SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod method) { + this.method = method; + return this; + } + + public SourcePostgresReplicationMethodScanChangesWithUserDefinedCursor(@JsonProperty("method") SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod method) { + this.method = method; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod.java b/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod.java new file mode 100755 index 000000000..c62696591 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod.java @@ -0,0 +1,18 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +public enum SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod { + STANDARD("Standard"); + + @JsonValue + public final String value; + + private SourcePostgresReplicationMethodScanChangesWithUserDefinedCursorMethod(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodStandard.java b/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodStandard.java deleted file mode 100755 index 528d30379..000000000 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodStandard.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT. - */ - -package com.airbyte.api.models.shared; - -import com.fasterxml.jackson.annotation.JsonProperty; - -/** - * SourcePostgresReplicationMethodStandard - Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. - */ - -public class SourcePostgresReplicationMethodStandard { - @JsonProperty("method") - public SourcePostgresReplicationMethodStandardMethod method; - - public SourcePostgresReplicationMethodStandard withMethod(SourcePostgresReplicationMethodStandardMethod method) { - this.method = method; - return this; - } - - public SourcePostgresReplicationMethodStandard(@JsonProperty("method") SourcePostgresReplicationMethodStandardMethod method) { - this.method = method; - } -} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodStandardXmin.java b/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodStandardXmin.java deleted file mode 100755 index 63958c72d..000000000 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodStandardXmin.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - */ - -package com.airbyte.api.models.shared; - -import com.fasterxml.jackson.annotation.JsonProperty; - -/** - * SourcePostgresReplicationMethodStandardXmin - Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally. 
- */ - -public class SourcePostgresReplicationMethodStandardXmin { - @JsonProperty("method") - public SourcePostgresReplicationMethodStandardXminMethod method; - - public SourcePostgresReplicationMethodStandardXmin withMethod(SourcePostgresReplicationMethodStandardXminMethod method) { - this.method = method; - return this; - } - - public SourcePostgresReplicationMethodStandardXmin(@JsonProperty("method") SourcePostgresReplicationMethodStandardXminMethod method) { - this.method = method; - } -} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourcePosthog.java b/lib/src/main/java/com/airbyte/api/models/shared/SourcePosthog.java index e0199533d..d062b8bed 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourcePosthog.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourcePosthog.java @@ -41,6 +41,18 @@ public SourcePosthog withBaseUrl(String baseUrl) { return this; } + /** + * Set lower value in case of failing long running sync of events stream. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("events_time_step") + public Long eventsTimeStep; + + public SourcePosthog withEventsTimeStep(Long eventsTimeStep) { + this.eventsTimeStep = eventsTimeStep; + return this; + } + @JsonProperty("sourceType") public SourcePosthogPosthog sourceType; diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourcePublicApis.java b/lib/src/main/java/com/airbyte/api/models/shared/SourcePublicApis.java deleted file mode 100755 index 8e4455237..000000000 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourcePublicApis.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - */ - -package com.airbyte.api.models.shared; - -import com.fasterxml.jackson.annotation.JsonProperty; - -/** - * SourcePublicApis - The values required to configure the source. 
- */ - -public class SourcePublicApis { - @JsonProperty("sourceType") - public SourcePublicApisPublicApis sourceType; - - public SourcePublicApis withSourceType(SourcePublicApisPublicApis sourceType) { - this.sourceType = sourceType; - return this; - } - - public SourcePublicApis(@JsonProperty("sourceType") SourcePublicApisPublicApis sourceType) { - this.sourceType = sourceType; - } -} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceS3.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3.java index c82bb669f..ed8f1dcc7 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceS3.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3.java @@ -4,18 +4,60 @@ package com.airbyte.api.models.shared; +import com.airbyte.api.utils.DateTimeDeserializer; +import com.airbyte.api.utils.DateTimeSerializer; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.time.OffsetDateTime; /** - * SourceS3 - The values required to configure the source. + * SourceS3 - NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes + * because it is responsible for converting legacy S3 v3 configs into v4 configs using the File-Based CDK. */ public class SourceS3 { /** - * The name of the stream you would like this source to output. Can contain letters, numbers, or underscores. + * In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. 
*/ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("aws_access_key_id") + public String awsAccessKeyId; + + public SourceS3 withAwsAccessKeyId(String awsAccessKeyId) { + this.awsAccessKeyId = awsAccessKeyId; + return this; + } + + /** + * In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("aws_secret_access_key") + public String awsSecretAccessKey; + + public SourceS3 withAwsSecretAccessKey(String awsSecretAccessKey) { + this.awsSecretAccessKey = awsSecretAccessKey; + return this; + } + + /** + * Name of the S3 bucket where the file(s) exist. + */ + @JsonProperty("bucket") + public String bucket; + + public SourceS3 withBucket(String bucket) { + this.bucket = bucket; + return this; + } + + /** + * Deprecated and will be removed soon. Please do not use this field anymore and use streams.name instead. The name of the stream you would like this source to output. Can contain letters, numbers, or underscores. + */ + @JsonInclude(Include.NON_ABSENT) @JsonProperty("dataset") public String dataset; @@ -25,7 +67,19 @@ public SourceS3 withDataset(String dataset) { } /** - * The format of the files you'd like to replicate + * Endpoint to an S3 compatible service. Leave empty to use AWS. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("endpoint") + public String endpoint; + + public SourceS3 withEndpoint(String endpoint) { + this.endpoint = endpoint; + return this; + } + + /** + * Deprecated and will be removed soon. Please do not use this field anymore and use streams.format instead. The format of the files you'd like to replicate */ @JsonInclude(Include.NON_ABSENT) @JsonProperty("format") @@ -37,8 +91,9 @@ public SourceS3 withFormat(Object format) { } /** - * A regular expression which tells the connector which files to replicate. 
All files which match this pattern will be replicated. Use | to separate multiple patterns. See <a href="https://facelessuser.github.io/wcmatch/glob/" target="_blank">this page</a> to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern <strong>**</strong> to pick up all files. + * Deprecated and will be removed soon. Please do not use this field anymore and use streams.globs instead. A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See <a href="https://facelessuser.github.io/wcmatch/glob/" target="_blank">this page</a> to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern <strong>**</strong> to pick up all files. */ + @JsonInclude(Include.NON_ABSENT) @JsonProperty("path_pattern") public String pathPattern; @@ -48,8 +103,9 @@ public SourceS3 withPathPattern(String pathPattern) { } /** - * Use this to load files from S3 or S3-compatible services + * Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services */ + @JsonInclude(Include.NON_ABSENT) @JsonProperty("provider") public SourceS3S3AmazonWebServices provider; @@ -59,7 +115,7 @@ public SourceS3 withProvider(SourceS3S3AmazonWebServices provider) { } /** - * Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of <strong>{ "column" : "type" }</strong>, where types are valid <a href="https://json-schema.org/understanding-json-schema/reference/type.html" target="_blank">JSON Schema datatypes</a>. Leave as {} to auto-infer the schema. + * Deprecated and will be removed soon. Please do not use this field anymore and use streams.input_schema instead. Optionally provide a schema to enforce, as a valid JSON string. 
Ensure this is a mapping of <strong>{ "column" : "type" }</strong>, where types are valid <a href="https://json-schema.org/understanding-json-schema/reference/type.html" target="_blank">JSON Schema datatypes</a>. Leave as {} to auto-infer the schema. */ @JsonInclude(Include.NON_ABSENT) @JsonProperty("schema") @@ -78,10 +134,34 @@ public SourceS3 withSourceType(SourceS3S3 sourceType) { return this; } - public SourceS3(@JsonProperty("dataset") String dataset, @JsonProperty("path_pattern") String pathPattern, @JsonProperty("provider") SourceS3S3AmazonWebServices provider, @JsonProperty("sourceType") SourceS3S3 sourceType) { - this.dataset = dataset; - this.pathPattern = pathPattern; - this.provider = provider; + /** + * UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonSerialize(using = DateTimeSerializer.class) + @JsonDeserialize(using = DateTimeDeserializer.class) + @JsonProperty("start_date") + public OffsetDateTime startDate; + + public SourceS3 withStartDate(OffsetDateTime startDate) { + this.startDate = startDate; + return this; + } + + /** + * Each instance of this configuration defines a <a href="https://docs.airbyte.com/cloud/core-concepts#stream">stream</a>. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table. 
+ */ + @JsonProperty("streams") + public SourceS3FileBasedStreamConfig[] streams; + + public SourceS3 withStreams(SourceS3FileBasedStreamConfig[] streams) { + this.streams = streams; + return this; + } + + public SourceS3(@JsonProperty("bucket") String bucket, @JsonProperty("sourceType") SourceS3S3 sourceType, @JsonProperty("streams") SourceS3FileBasedStreamConfig[] streams) { + this.bucket = bucket; this.sourceType = sourceType; + this.streams = streams; } } diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfig.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfig.java new file mode 100755 index 000000000..7feb98cde --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfig.java @@ -0,0 +1,135 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + + +public class SourceS3FileBasedStreamConfig { + /** + * When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("days_to_sync_if_history_is_full") + public Long daysToSyncIfHistoryIsFull; + + public SourceS3FileBasedStreamConfig withDaysToSyncIfHistoryIsFull(Long daysToSyncIfHistoryIsFull) { + this.daysToSyncIfHistoryIsFull = daysToSyncIfHistoryIsFull; + return this; + } + + /** + * The data file type that is being extracted for a stream. + */ + @JsonProperty("file_type") + public String fileType; + + public SourceS3FileBasedStreamConfig withFileType(String fileType) { + this.fileType = fileType; + return this; + } + + /** + * The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. 
+ */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("format") + public Object format; + + public SourceS3FileBasedStreamConfig withFormat(Object format) { + this.format = format; + return this; + } + + /** + * The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look <a href="https://en.wikipedia.org/wiki/Glob_(programming)">here</a>. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("globs") + public String[] globs; + + public SourceS3FileBasedStreamConfig withGlobs(String[] globs) { + this.globs = globs; + return this; + } + + /** + * The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("input_schema") + public String inputSchema; + + public SourceS3FileBasedStreamConfig withInputSchema(String inputSchema) { + this.inputSchema = inputSchema; + return this; + } + + /** + * The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("legacy_prefix") + public String legacyPrefix; + + public SourceS3FileBasedStreamConfig withLegacyPrefix(String legacyPrefix) { + this.legacyPrefix = legacyPrefix; + return this; + } + + /** + * The name of the stream. + */ + @JsonProperty("name") + public String name; + + public SourceS3FileBasedStreamConfig withName(String name) { + this.name = name; + return this; + } + + /** + * The column or columns (for a composite key) that serves as the unique identifier of a record. 
+ */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("primary_key") + public String primaryKey; + + public SourceS3FileBasedStreamConfig withPrimaryKey(String primaryKey) { + this.primaryKey = primaryKey; + return this; + } + + /** + * When enabled, syncs will not validate or structure records against the stream's schema. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("schemaless") + public Boolean schemaless; + + public SourceS3FileBasedStreamConfig withSchemaless(Boolean schemaless) { + this.schemaless = schemaless; + return this; + } + + /** + * The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("validation_policy") + public SourceS3FileBasedStreamConfigValidationPolicy validationPolicy; + + public SourceS3FileBasedStreamConfig withValidationPolicy(SourceS3FileBasedStreamConfigValidationPolicy validationPolicy) { + this.validationPolicy = validationPolicy; + return this; + } + + public SourceS3FileBasedStreamConfig(@JsonProperty("file_type") String fileType, @JsonProperty("name") String name) { + this.fileType = fileType; + this.name = name; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormat.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormat.java new file mode 100755 index 000000000..302e1de95 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormat.java @@ -0,0 +1,38 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * SourceS3FileBasedStreamConfigFormatAvroFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. + */ + +public class SourceS3FileBasedStreamConfigFormatAvroFormat { + /** + * Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("double_as_string") + public Boolean doubleAsString; + + public SourceS3FileBasedStreamConfigFormatAvroFormat withDoubleAsString(Boolean doubleAsString) { + this.doubleAsString = doubleAsString; + return this; + } + + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("filetype") + public SourceS3FileBasedStreamConfigFormatAvroFormatFiletype filetype; + + public SourceS3FileBasedStreamConfigFormatAvroFormat withFiletype(SourceS3FileBasedStreamConfigFormatAvroFormatFiletype filetype) { + this.filetype = filetype; + return this; + } + + public SourceS3FileBasedStreamConfigFormatAvroFormat(){} +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormatFiletype.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormatFiletype.java new file mode 100755 index 000000000..a0e4947cb --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatAvroFormatFiletype.java @@ -0,0 +1,18 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +public enum SourceS3FileBasedStreamConfigFormatAvroFormatFiletype { + AVRO("avro"); + + @JsonValue + public final String value; + + private SourceS3FileBasedStreamConfigFormatAvroFormatFiletype(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormat.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormat.java new file mode 100755 index 000000000..c1293e061 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormat.java @@ -0,0 +1,182 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * SourceS3FileBasedStreamConfigFormatCSVFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. + */ + +public class SourceS3FileBasedStreamConfigFormatCSVFormat { + /** + * The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("delimiter") + public String delimiter; + + public SourceS3FileBasedStreamConfigFormatCSVFormat withDelimiter(String delimiter) { + this.delimiter = delimiter; + return this; + } + + /** + * Whether two quotes in a quoted CSV value denote a single quote in the data. 
+ */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("double_quote") + public Boolean doubleQuote; + + public SourceS3FileBasedStreamConfigFormatCSVFormat withDoubleQuote(Boolean doubleQuote) { + this.doubleQuote = doubleQuote; + return this; + } + + /** + * The character encoding of the CSV data. Leave blank to default to <strong>UTF8</strong>. See <a href="https://docs.python.org/3/library/codecs.html#standard-encodings" target="_blank">list of python encodings</a> for allowable options. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("encoding") + public String encoding; + + public SourceS3FileBasedStreamConfigFormatCSVFormat withEncoding(String encoding) { + this.encoding = encoding; + return this; + } + + /** + * The character used for escaping special characters. To disallow escaping, leave this field blank. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("escape_char") + public String escapeChar; + + public SourceS3FileBasedStreamConfigFormatCSVFormat withEscapeChar(String escapeChar) { + this.escapeChar = escapeChar; + return this; + } + + /** + * A set of case-sensitive strings that should be interpreted as false values. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("false_values") + public String[] falseValues; + + public SourceS3FileBasedStreamConfigFormatCSVFormat withFalseValues(String[] falseValues) { + this.falseValues = falseValues; + return this; + } + + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("filetype") + public SourceS3FileBasedStreamConfigFormatCSVFormatFiletype filetype; + + public SourceS3FileBasedStreamConfigFormatCSVFormat withFiletype(SourceS3FileBasedStreamConfigFormatCSVFormatFiletype filetype) { + this.filetype = filetype; + return this; + } + + /** + * How headers will be defined. 
`User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("header_definition") + public Object headerDefinition; + + public SourceS3FileBasedStreamConfigFormatCSVFormat withHeaderDefinition(Object headerDefinition) { + this.headerDefinition = headerDefinition; + return this; + } + + /** + * How to infer the types of the columns. If none, inference default to strings. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("inference_type") + public SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType inferenceType; + + public SourceS3FileBasedStreamConfigFormatCSVFormat withInferenceType(SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType inferenceType) { + this.inferenceType = inferenceType; + return this; + } + + /** + * A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("null_values") + public String[] nullValues; + + public SourceS3FileBasedStreamConfigFormatCSVFormat withNullValues(String[] nullValues) { + this.nullValues = nullValues; + return this; + } + + /** + * The character used for quoting CSV values. To disallow quoting, make this field blank. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("quote_char") + public String quoteChar; + + public SourceS3FileBasedStreamConfigFormatCSVFormat withQuoteChar(String quoteChar) { + this.quoteChar = quoteChar; + return this; + } + + /** + * The number of rows to skip after the header row. 
+ */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("skip_rows_after_header") + public Long skipRowsAfterHeader; + + public SourceS3FileBasedStreamConfigFormatCSVFormat withSkipRowsAfterHeader(Long skipRowsAfterHeader) { + this.skipRowsAfterHeader = skipRowsAfterHeader; + return this; + } + + /** + * The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("skip_rows_before_header") + public Long skipRowsBeforeHeader; + + public SourceS3FileBasedStreamConfigFormatCSVFormat withSkipRowsBeforeHeader(Long skipRowsBeforeHeader) { + this.skipRowsBeforeHeader = skipRowsBeforeHeader; + return this; + } + + /** + * Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. + */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("strings_can_be_null") + public Boolean stringsCanBeNull; + + public SourceS3FileBasedStreamConfigFormatCSVFormat withStringsCanBeNull(Boolean stringsCanBeNull) { + this.stringsCanBeNull = stringsCanBeNull; + return this; + } + + /** + * A set of case-sensitive strings that should be interpreted as true values. 
+ */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("true_values") + public String[] trueValues; + + public SourceS3FileBasedStreamConfigFormatCSVFormat withTrueValues(String[] trueValues) { + this.trueValues = trueValues; + return this; + } + + public SourceS3FileBasedStreamConfigFormatCSVFormat(){} +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodStandardMethod.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatFiletype.java similarity index 61% rename from lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodStandardMethod.java rename to lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatFiletype.java index 0a8908130..1b0db6e8c 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodStandardMethod.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatFiletype.java @@ -6,13 +6,13 @@ import com.fasterxml.jackson.annotation.JsonValue; -public enum SourcePostgresReplicationMethodStandardMethod { - STANDARD("Standard"); +public enum SourceS3FileBasedStreamConfigFormatCSVFormatFiletype { + CSV("csv"); @JsonValue public final String value; - private SourcePostgresReplicationMethodStandardMethod(String value) { + private SourceS3FileBasedStreamConfigFormatCSVFormatFiletype(String value) { this.value = value; } } diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogenerated.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogenerated.java new file mode 100755 index 000000000..6b2f9d9f7 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogenerated.java @@ -0,0 +1,26 @@ +/* + * Code generated by Speakeasy 
(https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogenerated - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. + */ + +public class SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogenerated { + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("header_definition_type") + public SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType headerDefinitionType; + + public SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogenerated withHeaderDefinitionType(SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType headerDefinitionType) { + this.headerDefinitionType = headerDefinitionType; + return this; + } + + public SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogenerated(){} +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType.java new file mode 100755 index 000000000..37d116d83 --- /dev/null +++ 
b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType.java @@ -0,0 +1,18 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +public enum SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType { + AUTOGENERATED("Autogenerated"); + + @JsonValue + public final String value; + + private SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionAutogeneratedHeaderDefinitionType(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSV.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSV.java new file mode 100755 index 000000000..4f56cb2dd --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSV.java @@ -0,0 +1,26 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSV - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. 
+ */ + +public class SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSV { + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("header_definition_type") + public SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType headerDefinitionType; + + public SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSV withHeaderDefinitionType(SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType headerDefinitionType) { + this.headerDefinitionType = headerDefinitionType; + return this; + } + + public SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSV(){} +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType.java new file mode 100755 index 000000000..a4702c3f8 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType.java @@ -0,0 +1,18 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +public enum SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType { + FROM_CSV("From CSV"); + + @JsonValue + public final String value; + + private SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionFromCSVHeaderDefinitionType(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvided.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvided.java new file mode 100755 index 000000000..7d5551e9b --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvided.java @@ -0,0 +1,39 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvided - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. 
+ */ + +public class SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvided { + /** + * The column names that will be used while emitting the CSV records + */ + @JsonProperty("column_names") + public String[] columnNames; + + public SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvided withColumnNames(String[] columnNames) { + this.columnNames = columnNames; + return this; + } + + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("header_definition_type") + public SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType headerDefinitionType; + + public SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvided withHeaderDefinitionType(SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType headerDefinitionType) { + this.headerDefinitionType = headerDefinitionType; + return this; + } + + public SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvided(@JsonProperty("column_names") String[] columnNames) { + this.columnNames = columnNames; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType.java new file mode 100755 index 000000000..71dc033a5 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType.java @@ -0,0 +1,18 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +public enum SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType { + USER_PROVIDED("User Provided"); + + @JsonValue + public final String value; + + private SourceS3FileBasedStreamConfigFormatCSVFormatHeaderDefinitionUserProvidedHeaderDefinitionType(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType.java new file mode 100755 index 000000000..09479e21c --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType.java @@ -0,0 +1,22 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType - How to infer the types of the columns. If none, inference default to strings. + */ +public enum SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType { + NONE("None"), + PRIMITIVE_TYPES_ONLY("Primitive Types Only"); + + @JsonValue + public final String value; + + private SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormat.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormat.java new file mode 100755 index 000000000..3ddd325e1 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormat.java @@ -0,0 +1,26 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * SourceS3FileBasedStreamConfigFormatJsonlFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. + */ + +public class SourceS3FileBasedStreamConfigFormatJsonlFormat { + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("filetype") + public SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype filetype; + + public SourceS3FileBasedStreamConfigFormatJsonlFormat withFiletype(SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype filetype) { + this.filetype = filetype; + return this; + } + + public SourceS3FileBasedStreamConfigFormatJsonlFormat(){} +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCMethod.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype.java similarity index 59% rename from lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCMethod.java rename to lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype.java index 3e26228c0..5151b6f86 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceMssqlReplicationMethodLogicalReplicationCDCMethod.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype.java @@ -6,13 +6,13 @@ import com.fasterxml.jackson.annotation.JsonValue; -public enum SourceMssqlReplicationMethodLogicalReplicationCDCMethod { - CDC("CDC"); +public enum SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype { + JSONL("jsonl"); @JsonValue public final String value; - private SourceMssqlReplicationMethodLogicalReplicationCDCMethod(String value) { + private 
SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype(String value) { this.value = value; } } diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormat.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormat.java new file mode 100755 index 000000000..57a02dbf5 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormat.java @@ -0,0 +1,38 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * SourceS3FileBasedStreamConfigFormatParquetFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. + */ + +public class SourceS3FileBasedStreamConfigFormatParquetFormat { + /** + * Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. 
+ */ + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("decimal_as_float") + public Boolean decimalAsFloat; + + public SourceS3FileBasedStreamConfigFormatParquetFormat withDecimalAsFloat(Boolean decimalAsFloat) { + this.decimalAsFloat = decimalAsFloat; + return this; + } + + @JsonInclude(Include.NON_ABSENT) + @JsonProperty("filetype") + public SourceS3FileBasedStreamConfigFormatParquetFormatFiletype filetype; + + public SourceS3FileBasedStreamConfigFormatParquetFormat withFiletype(SourceS3FileBasedStreamConfigFormatParquetFormatFiletype filetype) { + this.filetype = filetype; + return this; + } + + public SourceS3FileBasedStreamConfigFormatParquetFormat(){} +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormatFiletype.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormatFiletype.java new file mode 100755 index 000000000..8924b5df0 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigFormatParquetFormatFiletype.java @@ -0,0 +1,18 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +public enum SourceS3FileBasedStreamConfigFormatParquetFormatFiletype { + PARQUET("parquet"); + + @JsonValue + public final String value; + + private SourceS3FileBasedStreamConfigFormatParquetFormatFiletype(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigValidationPolicy.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigValidationPolicy.java new file mode 100755 index 000000000..c96052a95 --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3FileBasedStreamConfigValidationPolicy.java @@ -0,0 +1,23 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * SourceS3FileBasedStreamConfigValidationPolicy - The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. + */ +public enum SourceS3FileBasedStreamConfigValidationPolicy { + EMIT_RECORD("Emit Record"), + SKIP_RECORD("Skip Record"), + WAIT_FOR_DISCOVER("Wait for Discover"); + + @JsonValue + public final String value; + + private SourceS3FileBasedStreamConfigValidationPolicy(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceS3S3AmazonWebServices.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3S3AmazonWebServices.java index 88bf16995..1ff2c2e43 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceS3S3AmazonWebServices.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceS3S3AmazonWebServices.java @@ -14,7 +14,7 @@ import java.time.OffsetDateTime; /** - * SourceS3S3AmazonWebServices - Use this to load files from S3 or S3-compatible services + * SourceS3S3AmazonWebServices - Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services */ public class SourceS3S3AmazonWebServices { @@ -45,6 +45,7 @@ public SourceS3S3AmazonWebServices withAwsSecretAccessKey(String awsSecretAccess /** * Name of the S3 bucket where the file(s) exist. 
*/ + @JsonInclude(Include.NON_ABSENT) @JsonProperty("bucket") public String bucket; @@ -91,7 +92,5 @@ public SourceS3S3AmazonWebServices withStartDate(OffsetDateTime startDate) { return this; } - public SourceS3S3AmazonWebServices(@JsonProperty("bucket") String bucket) { - this.bucket = bucket; - } + public SourceS3S3AmazonWebServices(){} } diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceShopify.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceShopify.java index 3ad47effa..0e6c39ecf 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceShopify.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceShopify.java @@ -49,6 +49,7 @@ public SourceShopify withSourceType(SourceShopifyShopify sourceType) { /** * The date you would like to replicate data from. Format: YYYY-MM-DD. Any data before this date will not be replicated. */ + @JsonInclude(Include.NON_ABSENT) @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd") @JsonProperty("start_date") public LocalDate startDate; @@ -58,9 +59,8 @@ public SourceShopify withStartDate(LocalDate startDate) { return this; } - public SourceShopify(@JsonProperty("shop") String shop, @JsonProperty("sourceType") SourceShopifyShopify sourceType, @JsonProperty("start_date") LocalDate startDate) { + public SourceShopify(@JsonProperty("shop") String shop, @JsonProperty("sourceType") SourceShopifyShopify sourceType) { this.shop = shop; this.sourceType = sourceType; - this.startDate = startDate; } } diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceStripe.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceStripe.java index d6062475d..86ff4c7af 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceStripe.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceStripe.java @@ -41,7 +41,7 @@ public SourceStripe withClientSecret(String clientSecret) { } /** - * When set, the connector will always re-export data from the past N days, 
where N is the value set here. This is useful if your data is frequently updated after creation. More info <a href="https://docs.airbyte.com/integrations/sources/stripe#requirements">here</a> + * When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. Applies only to streams that do not support event-based incremental syncs: CheckoutSessionLineItems, Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks. More info <a href="https://docs.airbyte.com/integrations/sources/stripe#requirements">here</a> */ @JsonInclude(Include.NON_ABSENT) @JsonProperty("lookback_window_days") @@ -75,6 +75,7 @@ public SourceStripe withSourceType(SourceStripeStripe sourceType) { /** * UTC date and time in the format 2017-01-25T00:00:00Z. Only data generated after this date will be replicated. */ + @JsonInclude(Include.NON_ABSENT) @JsonSerialize(using = DateTimeSerializer.class) @JsonDeserialize(using = DateTimeDeserializer.class) @JsonProperty("start_date") @@ -85,10 +86,9 @@ public SourceStripe withStartDate(OffsetDateTime startDate) { return this; } - public SourceStripe(@JsonProperty("account_id") String accountId, @JsonProperty("client_secret") String clientSecret, @JsonProperty("sourceType") SourceStripeStripe sourceType, @JsonProperty("start_date") OffsetDateTime startDate) { + public SourceStripe(@JsonProperty("account_id") String accountId, @JsonProperty("client_secret") String clientSecret, @JsonProperty("sourceType") SourceStripeStripe sourceType) { this.accountId = accountId; this.clientSecret = clientSecret; this.sourceType = sourceType; - this.startDate = startDate; } } diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshine.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshine.java index 538474801..f279668b9 100755 --- 
a/lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshine.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshine.java @@ -4,9 +4,14 @@ package com.airbyte.api.models.shared; +import com.airbyte.api.utils.DateTimeDeserializer; +import com.airbyte.api.utils.DateTimeSerializer; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import java.time.OffsetDateTime; /** * SourceZendeskSunshine - The values required to configure the source. @@ -33,10 +38,12 @@ public SourceZendeskSunshine withSourceType(SourceZendeskSunshineZendeskSunshine /** * The date from which you'd like to replicate data for Zendesk Sunshine API, in the format YYYY-MM-DDT00:00:00Z. */ + @JsonSerialize(using = DateTimeSerializer.class) + @JsonDeserialize(using = DateTimeDeserializer.class) @JsonProperty("start_date") - public String startDate; + public OffsetDateTime startDate; - public SourceZendeskSunshine withStartDate(String startDate) { + public SourceZendeskSunshine withStartDate(OffsetDateTime startDate) { this.startDate = startDate; return this; } @@ -52,7 +59,7 @@ public SourceZendeskSunshine withSubdomain(String subdomain) { return this; } - public SourceZendeskSunshine(@JsonProperty("sourceType") SourceZendeskSunshineZendeskSunshine sourceType, @JsonProperty("start_date") String startDate, @JsonProperty("subdomain") String subdomain) { + public SourceZendeskSunshine(@JsonProperty("sourceType") SourceZendeskSunshineZendeskSunshine sourceType, @JsonProperty("start_date") OffsetDateTime startDate, @JsonProperty("subdomain") String subdomain) { this.sourceType = sourceType; this.startDate = startDate; this.subdomain = subdomain; diff --git 
a/lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineCredentialsAPIToken.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineCredentialsAPIToken.java new file mode 100755 index 000000000..a7e33b42c --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineCredentialsAPIToken.java @@ -0,0 +1,46 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonProperty; + + +public class SourceZendeskSunshineCredentialsAPIToken { + /** + * API Token. See the <a href="https://docs.airbyte.com/integrations/sources/zendesk_sunshine">docs</a> for information on how to generate this key. + */ + @JsonProperty("api_token") + public String apiToken; + + public SourceZendeskSunshineCredentialsAPIToken withApiToken(String apiToken) { + this.apiToken = apiToken; + return this; + } + + @JsonProperty("auth_method") + public SourceZendeskSunshineCredentialsAPITokenAuthMethod authMethod; + + public SourceZendeskSunshineCredentialsAPIToken withAuthMethod(SourceZendeskSunshineCredentialsAPITokenAuthMethod authMethod) { + this.authMethod = authMethod; + return this; + } + + /** + * The user email for your Zendesk account + */ + @JsonProperty("email") + public String email; + + public SourceZendeskSunshineCredentialsAPIToken withEmail(String email) { + this.email = email; + return this; + } + + public SourceZendeskSunshineCredentialsAPIToken(@JsonProperty("api_token") String apiToken, @JsonProperty("auth_method") SourceZendeskSunshineCredentialsAPITokenAuthMethod authMethod, @JsonProperty("email") String email) { + this.apiToken = apiToken; + this.authMethod = authMethod; + this.email = email; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineCredentialsAPITokenAuthMethod.java 
b/lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineCredentialsAPITokenAuthMethod.java new file mode 100755 index 000000000..2e8e4f8dd --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineCredentialsAPITokenAuthMethod.java @@ -0,0 +1,18 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonValue; + +public enum SourceZendeskSunshineCredentialsAPITokenAuthMethod { + API_TOKEN("api_token"); + + @JsonValue + public final String value; + + private SourceZendeskSunshineCredentialsAPITokenAuthMethod(String value) { + this.value = value; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineCredentialsOAuth20.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineCredentialsOAuth20.java new file mode 100755 index 000000000..a0d244b5b --- /dev/null +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineCredentialsOAuth20.java @@ -0,0 +1,58 @@ +/* + * Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + */ + +package com.airbyte.api.models.shared; + +import com.fasterxml.jackson.annotation.JsonProperty; + + +public class SourceZendeskSunshineCredentialsOAuth20 { + /** + * Long-term access Token for making authenticated requests. + */ + @JsonProperty("access_token") + public String accessToken; + + public SourceZendeskSunshineCredentialsOAuth20 withAccessToken(String accessToken) { + this.accessToken = accessToken; + return this; + } + + @JsonProperty("auth_method") + public SourceZendeskSunshineCredentialsOAuth20AuthMethod authMethod; + + public SourceZendeskSunshineCredentialsOAuth20 withAuthMethod(SourceZendeskSunshineCredentialsOAuth20AuthMethod authMethod) { + this.authMethod = authMethod; + return this; + } + + /** + * The Client ID of your OAuth application. 
+ */ + @JsonProperty("client_id") + public String clientId; + + public SourceZendeskSunshineCredentialsOAuth20 withClientId(String clientId) { + this.clientId = clientId; + return this; + } + + /** + * The Client Secret of your OAuth application. + */ + @JsonProperty("client_secret") + public String clientSecret; + + public SourceZendeskSunshineCredentialsOAuth20 withClientSecret(String clientSecret) { + this.clientSecret = clientSecret; + return this; + } + + public SourceZendeskSunshineCredentialsOAuth20(@JsonProperty("access_token") String accessToken, @JsonProperty("auth_method") SourceZendeskSunshineCredentialsOAuth20AuthMethod authMethod, @JsonProperty("client_id") String clientId, @JsonProperty("client_secret") String clientSecret) { + this.accessToken = accessToken; + this.authMethod = authMethod; + this.clientId = clientId; + this.clientSecret = clientSecret; + } +} diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodStandardXminMethod.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineCredentialsOAuth20AuthMethod.java similarity index 63% rename from lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodStandardXminMethod.java rename to lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineCredentialsOAuth20AuthMethod.java index 1837bbf82..93bacd41c 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourcePostgresReplicationMethodStandardXminMethod.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSunshineCredentialsOAuth20AuthMethod.java @@ -6,13 +6,13 @@ import com.fasterxml.jackson.annotation.JsonValue; -public enum SourcePostgresReplicationMethodStandardXminMethod { - XMIN("Xmin"); +public enum SourceZendeskSunshineCredentialsOAuth20AuthMethod { + OAUTH20("oauth2.0"); @JsonValue public final String value; - private SourcePostgresReplicationMethodStandardXminMethod(String value) { + private 
SourceZendeskSunshineCredentialsOAuth20AuthMethod(String value) { this.value = value; } } diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSupport.java b/lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSupport.java index b0dac5719..06e4c6f11 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSupport.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourceZendeskSupport.java @@ -53,6 +53,7 @@ public SourceZendeskSupport withSourceType(SourceZendeskSupportZendeskSupport so /** * The UTC date and time from which you'd like to replicate data, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. */ + @JsonInclude(Include.NON_ABSENT) @JsonSerialize(using = DateTimeSerializer.class) @JsonDeserialize(using = DateTimeDeserializer.class) @JsonProperty("start_date") @@ -74,9 +75,8 @@ public SourceZendeskSupport withSubdomain(String subdomain) { return this; } - public SourceZendeskSupport(@JsonProperty("sourceType") SourceZendeskSupportZendeskSupport sourceType, @JsonProperty("start_date") OffsetDateTime startDate, @JsonProperty("subdomain") String subdomain) { + public SourceZendeskSupport(@JsonProperty("sourceType") SourceZendeskSupportZendeskSupport sourceType, @JsonProperty("subdomain") String subdomain) { this.sourceType = sourceType; - this.startDate = startDate; this.subdomain = subdomain; } } diff --git a/lib/src/main/java/com/airbyte/api/models/shared/SourcesResponse.java b/lib/src/main/java/com/airbyte/api/models/shared/SourcesResponse.java index 4afe1412f..dd9716c98 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/SourcesResponse.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/SourcesResponse.java @@ -8,9 +8,6 @@ import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; -/** - * SourcesResponse - Successful operation - */ public class SourcesResponse { @JsonProperty("data") diff --git 
a/lib/src/main/java/com/airbyte/api/models/shared/WorkspacesResponse.java b/lib/src/main/java/com/airbyte/api/models/shared/WorkspacesResponse.java index b59f2beaf..e5dce7e8f 100755 --- a/lib/src/main/java/com/airbyte/api/models/shared/WorkspacesResponse.java +++ b/lib/src/main/java/com/airbyte/api/models/shared/WorkspacesResponse.java @@ -8,9 +8,6 @@ import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; -/** - * WorkspacesResponse - Successful operation - */ public class WorkspacesResponse { @JsonProperty("data")