diff --git a/.speakeasy/gen.lock b/.speakeasy/gen.lock
index 263ff0c3..08c5ddf6 100755
--- a/.speakeasy/gen.lock
+++ b/.speakeasy/gen.lock
@@ -1,12 +1,12 @@
 lockVersion: 2.0.0
 id: 07961597-3730-4940-9fd0-35eb4118eab3
 management:
-    docChecksum: c154fc44f8133a378fdc9e766757a427
+    docChecksum: 9e5bfa57848f3a351e7908c770ab99b5
     docVersion: 1.0.0
-    speakeasyVersion: 1.453.10
-    generationVersion: 2.474.15
-    releaseVersion: 0.52.2
-    configChecksum: e2ed5a7be8b4d446bfd680d560ec170e
+    speakeasyVersion: 1.508.0
+    generationVersion: 2.536.0
+    releaseVersion: 0.53.0
+    configChecksum: d4b8483119f7c6e028c35824472c58e6
     repoURL: https://github.com/airbytehq/airbyte-api-python-sdk.git
     repoSubDirectory: .
     installationURL: https://github.com/airbytehq/airbyte-api-python-sdk.git
@@ -16,42 +16,65 @@ features:
         additionalDependencies: 0.1.0
         additionalProperties: 0.1.0
         constsAndDefaults: 0.1.4
-        core: 4.8.18
-        globalSecurity: 2.83.7
+        core: 4.8.23
+        deprecations: 2.81.1
+        globalSecurity: 2.83.8
         globalSecurityCallbacks: 0.1.0
         globalServerURLs: 2.82.2
         groups: 2.81.2
+        inputOutputModels: 2.83.1
         nullables: 0.1.0
         oauth2ClientCredentials: 0.2.0
         responseFormat: 0.1.0
         sdkHooks: 0.1.0
+        typeOverrides: 2.81.1
         unions: 2.82.10
 generatedFiles:
     - .gitattributes
-    - CONTRIBUTING.md
     - USAGE.md
     - docs/api/canceljobrequest.md
     - docs/api/canceljobresponse.md
     - docs/api/createconnectionresponse.md
+    - docs/api/createdeclarativesourcedefinitionrequest.md
+    - docs/api/createdeclarativesourcedefinitionresponse.md
+    - docs/api/createdestinationdefinitionrequest.md
+    - docs/api/createdestinationdefinitionresponse.md
     - docs/api/createdestinationresponse.md
    - docs/api/createjobresponse.md
+    - docs/api/createorupdateorganizationoauthcredentialsrequest.md
+    - docs/api/createorupdateorganizationoauthcredentialsresponse.md
     - docs/api/createorupdateworkspaceoauthcredentialsrequest.md
     - docs/api/createorupdateworkspaceoauthcredentialsresponse.md
     - docs/api/createpermissionresponse.md
+    - docs/api/createsourcedefinitionrequest.md
+    - docs/api/createsourcedefinitionresponse.md
     - docs/api/createsourceresponse.md
+    - docs/api/createtagresponse.md
     - docs/api/createworkspaceresponse.md
     - docs/api/deleteconnectionrequest.md
     - docs/api/deleteconnectionresponse.md
+    - docs/api/deletedeclarativesourcedefinitionrequest.md
+    - docs/api/deletedeclarativesourcedefinitionresponse.md
+    - docs/api/deletedestinationdefinitionrequest.md
+    - docs/api/deletedestinationdefinitionresponse.md
     - docs/api/deletedestinationrequest.md
     - docs/api/deletedestinationresponse.md
     - docs/api/deletepermissionrequest.md
     - docs/api/deletepermissionresponse.md
+    - docs/api/deletesourcedefinitionrequest.md
+    - docs/api/deletesourcedefinitionresponse.md
     - docs/api/deletesourcerequest.md
     - docs/api/deletesourceresponse.md
+    - docs/api/deletetagrequest.md
+    - docs/api/deletetagresponse.md
     - docs/api/deleteworkspacerequest.md
     - docs/api/deleteworkspaceresponse.md
     - docs/api/getconnectionrequest.md
     - docs/api/getconnectionresponse.md
+    - docs/api/getdeclarativesourcedefinitionrequest.md
+    - docs/api/getdeclarativesourcedefinitionresponse.md
+    - docs/api/getdestinationdefinitionrequest.md
+    - docs/api/getdestinationdefinitionresponse.md
     - docs/api/getdestinationrequest.md
     - docs/api/getdestinationresponse.md
     - docs/api/gethealthcheckresponse.md
@@ -59,15 +82,23 @@ generatedFiles:
     - docs/api/getjobresponse.md
     - docs/api/getpermissionrequest.md
     - docs/api/getpermissionresponse.md
+    - docs/api/getsourcedefinitionrequest.md
+    - docs/api/getsourcedefinitionresponse.md
     - docs/api/getsourcerequest.md
     - docs/api/getsourceresponse.md
     - docs/api/getstreampropertiesrequest.md
     - docs/api/getstreampropertiesresponse.md
+    - docs/api/gettagrequest.md
+    - docs/api/gettagresponse.md
     - docs/api/getworkspacerequest.md
     - docs/api/getworkspaceresponse.md
     - docs/api/initiateoauthresponse.md
     - docs/api/listconnectionsrequest.md
     - docs/api/listconnectionsresponse.md
+    - docs/api/listdeclarativesourcedefinitionsrequest.md
+    - docs/api/listdeclarativesourcedefinitionsresponse.md
+    - docs/api/listdestinationdefinitionsrequest.md
+    - docs/api/listdestinationdefinitionsresponse.md
     - docs/api/listdestinationsrequest.md
     - docs/api/listdestinationsresponse.md
     - docs/api/listjobsrequest.md
@@ -75,8 +106,12 @@ generatedFiles:
     - docs/api/listorganizationsforuserresponse.md
     - docs/api/listpermissionsrequest.md
     - docs/api/listpermissionsresponse.md
+    - docs/api/listsourcedefinitionsrequest.md
+    - docs/api/listsourcedefinitionsresponse.md
     - docs/api/listsourcesrequest.md
     - docs/api/listsourcesresponse.md
+    - docs/api/listtagsrequest.md
+    - docs/api/listtagsresponse.md
     - docs/api/listuserswithinanorganizationrequest.md
     - docs/api/listuserswithinanorganizationresponse.md
     - docs/api/listworkspacesrequest.md
@@ -91,8 +126,16 @@ generatedFiles:
     - docs/api/putdestinationresponse.md
     - docs/api/putsourcerequest.md
     - docs/api/putsourceresponse.md
+    - docs/api/updatedeclarativesourcedefinitionrequest.md
+    - docs/api/updatedeclarativesourcedefinitionresponse.md
+    - docs/api/updatedestinationdefinitionrequest.md
+    - docs/api/updatedestinationdefinitionresponse.md
     - docs/api/updatepermissionrequest.md
     - docs/api/updatepermissionresponse.md
+    - docs/api/updatesourcedefinitionrequest.md
+    - docs/api/updatesourcedefinitionresponse.md
+    - docs/api/updatetagrequest.md
+    - docs/api/updatetagresponse.md
     - docs/api/updateworkspacerequest.md
     - docs/api/updateworkspaceresponse.md
     - docs/models/accesstoken.md
@@ -111,6 +154,8 @@ generatedFiles:
     - docs/models/akeneo.md
     - docs/models/algolia.md
     - docs/models/allow.md
+    - docs/models/alpacabrokerapi.md
+    - docs/models/alphavantage.md
     - docs/models/amazonads.md
     - docs/models/amazonsellerpartner.md
     - docs/models/amazonsqs.md
@@ -120,6 +165,7 @@ generatedFiles:
     - docs/models/apiendpoint.md
     - docs/models/apiendpointprefix.md
     - docs/models/apifydataset.md
+    - docs/models/apihost.md
     - docs/models/apikey.md
     - docs/models/apikeyauth.md
     - docs/models/apikeysecret.md
@@ -132,15 +178,18 @@ generatedFiles:
     - docs/models/appfollow.md
     - docs/models/applesearchads.md
     - docs/models/applications.md
+    - docs/models/appsflyer.md
     - docs/models/apptivo.md
     - docs/models/asana.md
     - docs/models/asanacredentials.md
     - docs/models/ashby.md
+    - docs/models/assemblyai.md
     - docs/models/astra.md
     - docs/models/auth0.md
     - docs/models/authenticateviaaccesskeys.md
     - docs/models/authenticateviaapikey.md
     - docs/models/authenticateviaasanaoauth.md
+    - docs/models/authenticateviaclientcredentials.md
     - docs/models/authenticateviafacebookmarketingoauth.md
     - docs/models/authenticateviagoogleoauth.md
     - docs/models/authenticateviaharvestoauth.md
@@ -163,16 +212,18 @@ generatedFiles:
     - docs/models/authenticationmethod.md
     - docs/models/authenticationmode.md
     - docs/models/authenticationtype.md
-    - docs/models/authenticationviagoogleoauth.md
     - docs/models/authenticationwildcard.md
     - docs/models/authmethod.md
     - docs/models/authorization.md
+    - docs/models/authorizationmechanism.md
     - docs/models/authorizationmethod.md
     - docs/models/authorizationtype.md
     - docs/models/authtype.md
     - docs/models/autogenerated.md
+    - docs/models/aviationstack.md
     - docs/models/avroapacheavro.md
     - docs/models/avroformat.md
+    - docs/models/awinadvertiser.md
     - docs/models/awscloudtrail.md
     - docs/models/awsdatalake.md
     - docs/models/awsenvironment.md
@@ -184,6 +235,7 @@ generatedFiles:
     - docs/models/azureblobstoragecredentials.md
     - docs/models/azureopenai.md
     - docs/models/azuretable.md
+    - docs/models/babelforce.md
     - docs/models/bamboohr.md
     - docs/models/basecamp.md
     - docs/models/baseurl.md
@@ -197,15 +249,19 @@ generatedFiles:
     - docs/models/bingads.md
     - docs/models/bitly.md
     - docs/models/blogger.md
+    - docs/models/bluetally.md
+    - docs/models/boldsign.md
     - docs/models/bothusernameandpasswordisrequiredforauthenticationrequest.md
     - docs/models/box.md
     - docs/models/braintree.md
     - docs/models/braze.md
+    - docs/models/breezometer.md
     - docs/models/breezyhr.md
     - docs/models/brevo.md
     - docs/models/brex.md
     - docs/models/bugsnag.md
     - docs/models/buildkite.md
+    - docs/models/bulkload.md
     - docs/models/bunnyinc.md
     - docs/models/buzzsprout.md
     - docs/models/bymarkdownheader.md
@@ -220,6 +276,7 @@ generatedFiles:
     - docs/models/campayn.md
     - docs/models/canny.md
     - docs/models/capsulecrm.md
+    - docs/models/captaindata.md
     - docs/models/capturemodeadvanced.md
     - docs/models/carequalitycommission.md
     - docs/models/cart.md
@@ -227,6 +284,7 @@ generatedFiles:
     - docs/models/catalogtype.md
     - docs/models/categories.md
     - docs/models/category.md
+    - docs/models/cdcdeletionmode.md
     - docs/models/centralapirouter.md
     - docs/models/chameleon.md
     - docs/models/chargebee.md
@@ -234,10 +292,12 @@ generatedFiles:
     - docs/models/chargify.md
     - docs/models/chartmogul.md
     - docs/models/choosehowtopartitiondata.md
+    - docs/models/churnkey.md
     - docs/models/cimis.md
     - docs/models/cin7.md
     - docs/models/circa.md
     - docs/models/circleci.md
+    - docs/models/ciscomeraki.md
     - docs/models/clarifai.md
     - docs/models/clazar.md
     - docs/models/clickhouse.md
@@ -258,6 +318,7 @@ generatedFiles:
     - docs/models/cohorts.md
     - docs/models/cohortsrange.md
     - docs/models/coinapi.md
+    - docs/models/coingeckocoins.md
     - docs/models/coinmarketcap.md
     - docs/models/collection.md
     - docs/models/compression.md
@@ -283,8 +344,11 @@ generatedFiles:
     - docs/models/convex.md
     - docs/models/copper.md
     - docs/models/copyrawfiles.md
+    - docs/models/couchbase.md
     - docs/models/countercyclical.md
     - docs/models/country.md
+    - docs/models/createdeclarativesourcedefinitionrequest.md
+    - docs/models/createdefinitionrequest.md
     - docs/models/credential.md
     - docs/models/credentials.md
     - docs/models/credentialstitle.md
@@ -292,7 +356,9 @@ generatedFiles:
     - docs/models/csvcommaseparatedvalues.md
     - docs/models/csvformat.md
     - docs/models/csvheaderdefinition.md
+    - docs/models/cursormethod.md
     - docs/models/customerio.md
+    - docs/models/customerly.md
     - docs/models/customerstatus.md
     - docs/models/customqueriesarray.md
     - docs/models/customreportconfig.md
@@ -308,8 +374,15 @@ generatedFiles:
     - docs/models/datasource.md
     - docs/models/datatype.md
     - docs/models/daterange.md
+    - docs/models/days.md
     - docs/models/dbt.md
+    - docs/models/declarativesourcedefinitionresponse.md
+    - docs/models/declarativesourcedefinitionsresponse.md
+    - docs/models/deepset.md
     - docs/models/defaultvectorizer.md
+    - docs/models/definitionofconversioncountinreports.md
+    - docs/models/definitionresponse.md
+    - docs/models/definitionsresponse.md
     - docs/models/deflate.md
     - docs/models/deletionmode.md
     - docs/models/delighted.md
@@ -319,7 +392,7 @@ generatedFiles:
     - docs/models/destinationastra.md
     - docs/models/destinationastralanguage.md
     - docs/models/destinationastramode.md
-    - docs/models/destinationastraschemasembeddingembedding1mode.md
+    - docs/models/destinationastraschemasembeddingembedding5mode.md
     - docs/models/destinationastraschemasembeddingembeddingmode.md
     - docs/models/destinationastraschemasembeddingmode.md
     - docs/models/destinationastraschemasmode.md
@@ -332,6 +405,7 @@ generatedFiles:
     - docs/models/destinationawsdatalakeformattypewildcard.md
     - docs/models/destinationazureblobstorage.md
     - docs/models/destinationazureblobstorageazureblobstorage.md
+    - docs/models/destinationazureblobstorageflattening.md
     - docs/models/destinationazureblobstorageformattype.md
     - docs/models/destinationazureblobstoragejsonlinesnewlinedelimitedjson.md
     - docs/models/destinationbigquery.md
@@ -344,9 +418,15 @@ generatedFiles:
     - docs/models/destinationconfiguration.md
     - docs/models/destinationconvex.md
     - docs/models/destinationcreaterequest.md
+    - docs/models/destinationcustomerio.md
+    - docs/models/destinationcustomeriocredentials.md
+    - docs/models/destinationcustomerios3.md
+    - docs/models/destinationcustomerios3bucketregion.md
+    - docs/models/destinationcustomeriostoragetype.md
     - docs/models/destinationdatabricks.md
     - docs/models/destinationdatabricksauthtype.md
     - docs/models/destinationdatabricksschemasauthtype.md
+    - docs/models/destinationdeepset.md
     - docs/models/destinationdevnull.md
     - docs/models/destinationdevnullloggingtype.md
     - docs/models/destinationdevnullschemasloggingtype.md
@@ -357,6 +437,7 @@ generatedFiles:
     - docs/models/destinationdynamodb.md
     - docs/models/destinationelasticsearch.md
     - docs/models/destinationelasticsearchmethod.md
+    - docs/models/destinationelasticsearchnone.md
     - docs/models/destinationelasticsearchnotunnel.md
     - docs/models/destinationelasticsearchpasswordauthentication.md
     - docs/models/destinationelasticsearchschemasauthenticationmethodmethod.md
@@ -396,15 +477,19 @@ generatedFiles:
     - docs/models/destinationgcsschemasformattype.md
     - docs/models/destinationgcsschemasnocompression.md
     - docs/models/destinationgooglesheets.md
+    - docs/models/destinationgooglesheetsauthentication.md
+    - docs/models/destinationgooglesheetsauthtype.md
     - docs/models/destinationgooglesheetsgooglesheets.md
-    - docs/models/destinationiceberg.md
-    - docs/models/destinationicebergcatalogtype.md
-    - docs/models/destinationicebergs3.md
-    - docs/models/destinationicebergs3bucketregion.md
-    - docs/models/destinationicebergschemascatalogconfigcatalogtype.md
-    - docs/models/destinationicebergschemascatalogconfigicebergcatalogconfigcatalogtype.md
-    - docs/models/destinationicebergschemascatalogtype.md
-    - docs/models/destinationicebergstoragetype.md
+    - docs/models/destinationgooglesheetsschemasauthtype.md
+    - docs/models/destinationhubspot.md
+    - docs/models/destinationhubspotcredentials.md
+    - docs/models/destinationhubspothubspot.md
+    - docs/models/destinationhubspotnone.md
+    - docs/models/destinationhubspotobjectstorageconfiguration.md
+    - docs/models/destinationhubspots3.md
+    - docs/models/destinationhubspots3bucketregion.md
+    - docs/models/destinationhubspotschemasstoragetype.md
+    - docs/models/destinationhubspotstoragetype.md
     - docs/models/destinationmilvus.md
     - docs/models/destinationmilvusapitoken.md
     - docs/models/destinationmilvusauthentication.md
@@ -449,16 +534,30 @@ generatedFiles:
     - docs/models/destinationmongodbtunnelmethod.md
     - docs/models/destinationmotherduck.md
     - docs/models/destinationmssql.md
+    - docs/models/destinationmssqlloadtype.md
+    - docs/models/destinationmssqlname.md
     - docs/models/destinationmssqlnotunnel.md
     - docs/models/destinationmssqlpasswordauthentication.md
-    - docs/models/destinationmssqlschemassslmethod.md
-    - docs/models/destinationmssqlschemassslmethodsslmethod.md
+    - docs/models/destinationmssqlschemasloadtype.md
+    - docs/models/destinationmssqlschemasname.md
     - docs/models/destinationmssqlschemastunnelmethod.md
     - docs/models/destinationmssqlschemastunnelmethodtunnelmethod.md
     - docs/models/destinationmssqlsshkeyauthentication.md
     - docs/models/destinationmssqlsshtunnelmethod.md
-    - docs/models/destinationmssqlsslmethod.md
     - docs/models/destinationmssqltunnelmethod.md
+    - docs/models/destinationmssqlv2.md
+    - docs/models/destinationmssqlv2bulkload.md
+    - docs/models/destinationmssqlv2encryptedtrustservercertificate.md
+    - docs/models/destinationmssqlv2encryptedverifycertificate.md
+    - docs/models/destinationmssqlv2insertload.md
+    - docs/models/destinationmssqlv2loadtype.md
+    - docs/models/destinationmssqlv2name.md
+    - docs/models/destinationmssqlv2schemasloadtype.md
+    - docs/models/destinationmssqlv2schemasloadtypeloadtype.md
+    - docs/models/destinationmssqlv2schemasname.md
+    - docs/models/destinationmssqlv2schemassslmethodname.md
+    - docs/models/destinationmssqlv2sslmethod.md
+    - docs/models/destinationmssqlv2unencrypted.md
     - docs/models/destinationmysql.md
     - docs/models/destinationmysqlnotunnel.md
     - docs/models/destinationmysqlpasswordauthentication.md
@@ -468,6 +567,7 @@ generatedFiles:
     - docs/models/destinationmysqlsshtunnelmethod.md
     - docs/models/destinationmysqltunnelmethod.md
     - docs/models/destinationoracle.md
+    - docs/models/destinationoracleencryption.md
     - docs/models/destinationoracleencryptionmethod.md
     - docs/models/destinationoraclenotunnel.md
     - docs/models/destinationoraclepasswordauthentication.md
@@ -531,7 +631,7 @@ generatedFiles:
     - docs/models/destinationpostgrespasswordauthentication.md
     - docs/models/destinationpostgresschemasmode.md
     - docs/models/destinationpostgresschemassslmodemode.md
-    - docs/models/destinationpostgresschemassslmodesslmodes1mode.md
+    - docs/models/destinationpostgresschemassslmodesslmodes5mode.md
     - docs/models/destinationpostgresschemassslmodesslmodes6mode.md
     - docs/models/destinationpostgresschemassslmodesslmodesmode.md
     - docs/models/destinationpostgresschemastunnelmethod.md
@@ -600,19 +700,14 @@ generatedFiles:
     - docs/models/destinations3compressioncodec.md
     - docs/models/destinations3compressiontype.md
     - docs/models/destinations3csvcommaseparatedvalues.md
+    - docs/models/destinations3datalake.md
+    - docs/models/destinations3datalakecatalogtype.md
+    - docs/models/destinations3datalakes3bucketregion.md
+    - docs/models/destinations3datalakeschemascatalogtype.md
+    - docs/models/destinations3datalakeschemascatalogtypecatalogtype.md
     - docs/models/destinations3deflate.md
     - docs/models/destinations3flattening.md
     - docs/models/destinations3formattype.md
-    - docs/models/destinations3glue.md
-    - docs/models/destinations3gluecompression.md
-    - docs/models/destinations3gluecompressiontype.md
-    - docs/models/destinations3glueformattype.md
-    - docs/models/destinations3gluegzip.md
-    - docs/models/destinations3gluejsonlinesnewlinedelimitedjson.md
-    - docs/models/destinations3gluenocompression.md
-    - docs/models/destinations3glueoutputformat.md
-    - docs/models/destinations3glues3bucketregion.md
-    - docs/models/destinations3glueschemascompressiontype.md
     - docs/models/destinations3gzip.md
     - docs/models/destinations3jsonlinesnewlinedelimitedjson.md
     - docs/models/destinations3nocompression.md
@@ -639,6 +734,14 @@ generatedFiles:
     - docs/models/destinations3snappy.md
     - docs/models/destinations3xz.md
     - docs/models/destinations3zstandard.md
+    - docs/models/destinationsalesforce.md
+    - docs/models/destinationsalesforcenone.md
+    - docs/models/destinationsalesforceobjectstorageconfiguration.md
+    - docs/models/destinationsalesforces3.md
+    - docs/models/destinationsalesforces3bucketregion.md
+    - docs/models/destinationsalesforcesalesforce.md
+    - docs/models/destinationsalesforceschemasstoragetype.md
+    - docs/models/destinationsalesforcestoragetype.md
     - docs/models/destinationsftpjson.md
     - docs/models/destinationsnowflake.md
     - docs/models/destinationsnowflakeauthtype.md
@@ -668,14 +771,16 @@ generatedFiles:
     - docs/models/destinationsnowflakeoauth20.md
     - docs/models/destinationsnowflakeschemasauthtype.md
     - docs/models/destinationsnowflakeschemascredentialsauthtype.md
-    - docs/models/destinationsnowflakesnowflake.md
     - docs/models/destinationsresponse.md
+    - docs/models/destinationsurrealdb.md
     - docs/models/destinationteradata.md
     - docs/models/destinationteradataallow.md
+    - docs/models/destinationteradataauthtype.md
     - docs/models/destinationteradatadisable.md
     - docs/models/destinationteradatamode.md
     - docs/models/destinationteradataprefer.md
     - docs/models/destinationteradatarequire.md
+    - docs/models/destinationteradataschemasauthtype.md
     - docs/models/destinationteradataschemasmode.md
     - docs/models/destinationteradataschemassslmodemode.md
     - docs/models/destinationteradataschemassslmodesslmodes5mode.md
@@ -744,12 +849,14 @@ generatedFiles:
     - docs/models/devnull.md
     - docs/models/dimension.md
     - docs/models/dimensionsfilter.md
+    - docs/models/dingconnect.md
     - docs/models/disable.md
     - docs/models/disabled.md
     - docs/models/distancemetric.md
     - docs/models/dixa.md
     - docs/models/dockerhub.md
-    - docs/models/documentfiletypeformatexperimental.md
+    - docs/models/docuseal.md
+    - docs/models/dolibarr.md
     - docs/models/domain.md
     - docs/models/domainregioncode.md
     - docs/models/doublevalue.md
@@ -759,13 +866,17 @@ generatedFiles:
     - docs/models/drip.md
     - docs/models/dropboxsign.md
     - docs/models/duckdb.md
+    - docs/models/dwolla.md
     - docs/models/dynamodb.md
     - docs/models/dynamodbregion.md
     - docs/models/easypost.md
     - docs/models/easypromos.md
+    - docs/models/ebayfinance.md
+    - docs/models/ebayfulfillment.md
     - docs/models/economic.md
     - docs/models/elasticemail.md
     - docs/models/elasticsearch.md
+    - docs/models/emailnotificationconfig.md
     - docs/models/emailoctopus.md
     - docs/models/embedding.md
     - docs/models/employmenthero.md
@@ -774,16 +885,21 @@ generatedFiles:
     - docs/models/encryptedtrustservercertificate.md
     - docs/models/encryptedverifycertificate.md
     - docs/models/encryption.md
+    - docs/models/encryptionaes.md
     - docs/models/encryptionalgorithm.md
+    - docs/models/encryptionmapperalgorithm.md
     - docs/models/encryptionmethod.md
+    - docs/models/encryptionrsa.md
     - docs/models/engagementwindowdays.md
     - docs/models/enterprise.md
     - docs/models/entity.md
     - docs/models/environment.md
+    - docs/models/equal.md
     - docs/models/eubasedaccount.md
     - docs/models/eventbrite.md
     - docs/models/eventee.md
     - docs/models/eventzilla.md
+    - docs/models/everhour.md
     - docs/models/everynthentry.md
     - docs/models/excelformat.md
     - docs/models/exchangerates.md
@@ -792,16 +908,20 @@ generatedFiles:
     - docs/models/ezofficeinventory.md
     - docs/models/facebookmarketing.md
     - docs/models/facebookmarketingcredentials.md
+    - docs/models/facebookpages.md
     - docs/models/factorial.md
     - docs/models/failing.md
     - docs/models/fake.md
     - docs/models/faker.md
+    - docs/models/fastbill.md
+    - docs/models/fastly.md
     - docs/models/fauna.md
     - docs/models/fieldnamemappingconfigmodel.md
+    - docs/models/fieldrenaming.md
+    - docs/models/fields.md
     - docs/models/file.md
     - docs/models/filebasedstreamconfig.md
     - docs/models/fileformat.md
-    - docs/models/filestorageformat.md
     - docs/models/filetype.md
     - docs/models/fillout.md
     - docs/models/filter_.md
@@ -809,6 +929,7 @@ generatedFiles:
     - docs/models/filtername.md
     - docs/models/filtertype.md
     - docs/models/finage.md
+    - docs/models/financialeventsstepsizeindays.md
     - docs/models/financialmodelling.md
     - docs/models/finnhub.md
     - docs/models/finnworlds.md
@@ -839,17 +960,17 @@ generatedFiles:
     - docs/models/fromvalue.md
     - docs/models/front.md
     - docs/models/fulcrum.md
+    - docs/models/fullstory.md
     - docs/models/gainsightpx.md
     - docs/models/gcs.md
     - docs/models/gcsbucketregion.md
     - docs/models/gcscredentials.md
     - docs/models/gcsgooglecloudstorage.md
     - docs/models/gcsstaging.md
-    - docs/models/gcstmpfilesafterwardprocessing.md
-    - docs/models/geographyenum.md
-    - docs/models/geographyenumnodefault.md
+    - docs/models/gcstmpfilespostprocessing.md
     - docs/models/getgist.md
     - docs/models/getlago.md
+    - docs/models/giphy.md
     - docs/models/gitbook.md
     - docs/models/github.md
     - docs/models/githubcredentials.md
@@ -863,6 +984,7 @@ generatedFiles:
     - docs/models/gocardless.md
     - docs/models/gocardlessapienvironment.md
     - docs/models/goldcast.md
+    - docs/models/gologin.md
     - docs/models/gong.md
     - docs/models/googleads.md
     - docs/models/googleadscredentials.md
@@ -886,38 +1008,46 @@ generatedFiles:
     - docs/models/granularityforgeolocationregion.md
     - docs/models/granularityforperiodicreports.md
     - docs/models/greenhouse.md
+    - docs/models/greythr.md
     - docs/models/gridly.md
     - docs/models/groupby.md
     - docs/models/guru.md
     - docs/models/gutendex.md
     - docs/models/gzip.md
-    - docs/models/hadoopcatalogusehierarchicalfilesystemsassameasstorageconfig.md
     - docs/models/hardcodedrecords.md
+    - docs/models/harness.md
     - docs/models/harvest.md
+    - docs/models/hashing.md
+    - docs/models/hashingmethod.md
     - docs/models/header.md
     - docs/models/headerdefinitiontype.md
     - docs/models/height.md
+    - docs/models/hellobaton.md
+    - docs/models/helpscout.md
     - docs/models/hibob.md
     - docs/models/highlevel.md
-    - docs/models/hivecataloguseapachehivemetastore.md
     - docs/models/hmackey.md
+    - docs/models/hoorayhr.md
     - docs/models/httpspublicweb.md
     - docs/models/hubplanner.md
     - docs/models/hubspot.md
     - docs/models/hubspotcredentials.md
+    - docs/models/huggingfacedatasets.md
     - docs/models/humanitix.md
+    - docs/models/huntr.md
     - docs/models/iamrole.md
     - docs/models/iamuser.md
-    - docs/models/iceberg.md
-    - docs/models/icebergcatalogconfig.md
     - docs/models/illuminabasespace.md
+    - docs/models/imagga.md
     - docs/models/in_.md
     - docs/models/incidentio.md
     - docs/models/indexing.md
     - docs/models/inflowinventory.md
     - docs/models/initiateoauthrequest.md
     - docs/models/inlistfilter.md
+    - docs/models/insertload.md
     - docs/models/insightconfig.md
+    - docs/models/insightful.md
     - docs/models/insightly.md
     - docs/models/instagram.md
     - docs/models/instance.md
@@ -925,26 +1055,31 @@ generatedFiles:
     - docs/models/int64value.md
     - docs/models/intercom.md
     - docs/models/interval.md
+    - docs/models/intruder.md
     - docs/models/invalidcdcpositionbehavioradvanced.md
     - docs/models/invoiced.md
     - docs/models/invoiceninja.md
     - docs/models/ip2whois.md
     - docs/models/iterable.md
-    - docs/models/jdbccataloguserelationaldatabase.md
+    - docs/models/jamfpro.md
     - docs/models/jira.md
     - docs/models/jobcreaterequest.md
     - docs/models/jobnimbus.md
     - docs/models/jobresponse.md
     - docs/models/jobsresponse.md
     - docs/models/jobstatusenum.md
+    - docs/models/jobtype.md
     - docs/models/jobtypeenum.md
+    - docs/models/jobtyperesourcelimit.md
     - docs/models/jotform.md
     - docs/models/jsonlformat.md
     - docs/models/jsonlinesnewlinedelimitedjson.md
+    - docs/models/judgemereviews.md
     - docs/models/justcall.md
     - docs/models/justsift.md
     - docs/models/k6cloud.md
     - docs/models/katana.md
+    - docs/models/keka.md
     - docs/models/keypairauthentication.md
     - docs/models/kind.md
     - docs/models/kisi.md
@@ -956,6 +1091,7 @@ generatedFiles:
     - docs/models/lang.md
     - docs/models/language.md
     - docs/models/launchdarkly.md
+    - docs/models/ldap.md
     - docs/models/leadfeeder.md
     - docs/models/lemlist.md
     - docs/models/lessannoyingcrm.md
@@ -963,11 +1099,13 @@ generatedFiles:
     - docs/models/leverhiring.md
     - docs/models/leverhiringcredentials.md
     - docs/models/lightspeedretail.md
+    - docs/models/linear.md
     - docs/models/linkedinads.md
     - docs/models/linkedinadscredentials.md
     - docs/models/linkedinpages.md
     - docs/models/linnworks.md
     - docs/models/loadingmethod.md
+    - docs/models/loadtype.md
     - docs/models/lob.md
     - docs/models/local.md
     - docs/models/localfilesystemlimited.md
@@ -982,6 +1120,7 @@ generatedFiles:
     - docs/models/mailchimp.md
     - docs/models/mailchimpcredentials.md
     - docs/models/mailerlite.md
+    - docs/models/mailersend.md
     - docs/models/mailgun.md
     - docs/models/mailjetmail.md
     - docs/models/mailjetsms.md
@@ -991,7 +1130,10 @@ generatedFiles:
     - docs/models/marketnewscategory.md
     - docs/models/marketo.md
     - docs/models/marketstack.md
+    - docs/models/mendeley.md
     - docs/models/mention.md
+    - docs/models/mercadoads.md
+    - docs/models/merge.md
     - docs/models/metabase.md
     - docs/models/method.md
     - docs/models/metricsfilter.md
@@ -1019,17 +1161,26 @@ generatedFiles:
     - docs/models/mongodbv2.md
     - docs/models/motherduck.md
     - docs/models/mssql.md
+    - docs/models/mssqlv2.md
     - docs/models/mux.md
     - docs/models/myhours.md
     - docs/models/mysql.md
     - docs/models/n8n.md
+    - docs/models/name.md
     - docs/models/namespacedefinitionenum.md
     - docs/models/namespacedefinitionenumnodefault.md
     - docs/models/nasa.md
     - docs/models/nativenetworkencryptionnne.md
+    - docs/models/navan.md
+    - docs/models/nebiusai.md
+    - docs/models/nessiecatalog.md
     - docs/models/netsuite.md
+    - docs/models/netsuiteenterprise.md
     - docs/models/newsapi.md
+    - docs/models/newsdata.md
     - docs/models/newsdataio.md
+    - docs/models/nexiopay.md
+    - docs/models/ninjaonermm.md
     - docs/models/noauth.md
     - docs/models/noauthentication.md
     - docs/models/nocompression.md
@@ -1039,9 +1190,11 @@ generatedFiles:
     - docs/models/nonbreakingschemaupdatesbehaviorenumnodefault.md
     - docs/models/nonet.md
     - docs/models/normalization.md
-    - docs/models/normalizationflattening.md
     - docs/models/northpasslms.md
+    - docs/models/not_.md
     - docs/models/notexpression.md
+    - docs/models/notificationconfig.md
+    - docs/models/notificationsconfig.md
     - docs/models/notion.md
     - docs/models/notioncredentials.md
     - docs/models/notunnel.md
@@ -1055,14 +1208,16 @@ generatedFiles:
     - docs/models/oauth20credentials.md
     - docs/models/oauth20withprivatekey.md
     - docs/models/oauth2accesstoken.md
+    - docs/models/oauth2authentication.md
     - docs/models/oauth2confidentialapplication.md
     - docs/models/oauth2recommended.md
     - docs/models/oauthactornames.md
     - docs/models/oauthauthentication.md
-    - docs/models/oauthcredentialsconfiguration.md
+    - docs/models/objectstorageconfiguration.md
     - docs/models/okta.md
     - docs/models/omnisend.md
     - docs/models/oncehub.md
+    - docs/models/onehundredms.md
     - docs/models/onepagecrm.md
     - docs/models/onesignal.md
     - docs/models/onfleet.md
@@ -1070,6 +1225,7 @@
     - docs/models/openaicompatible.md
     - docs/models/openaq.md
     - docs/models/opendatadc.md
+    - docs/models/openexchangerates.md
     - docs/models/openfda.md
     - docs/models/openweather.md
     - docs/models/operator.md
@@ -1077,9 +1233,11 @@
     - docs/models/opsgenie.md
     - docs/models/optionslist.md
     - docs/models/optiontitle.md
+    - docs/models/opuswatch.md
     - docs/models/oracle.md
+    - docs/models/oracleenterprise.md
     - docs/models/orb.md
-    - docs/models/orbit.md
+    - docs/models/organizationoauthcredentialsrequest.md
     - docs/models/organizationresponse.md
     - docs/models/organizationsresponse.md
     - docs/models/orgroup.md
@@ -1088,9 +1246,13 @@
     - docs/models/outbrainamplify.md
     - docs/models/outputformat.md
     - docs/models/outputformatwildcard.md
+    - docs/models/outputsize.md
     - docs/models/outreach.md
     - docs/models/oveit.md
     - docs/models/pabblysubscriptionsbilling.md
+    - docs/models/padding.md
+    - docs/models/paddle.md
+    - docs/models/pagerduty.md
     - docs/models/pandadoc.md
     - docs/models/paperform.md
     - docs/models/papersign.md
@@ -1098,11 +1260,15 @@
     - docs/models/parquetcolumnarstorage.md
     - docs/models/parquetformat.md
     - docs/models/parsingstrategy.md
+    - docs/models/partnerize.md
+    - docs/models/partnerstack.md
     - docs/models/passwordauthentication.md
+    - docs/models/payfit.md
     - docs/models/paypaltransaction.md
     - docs/models/paystack.md
     - docs/models/pendo.md
     - docs/models/pennylane.md
+    - docs/models/perigon.md
     - docs/models/periodusedformostpopularstreams.md
     - docs/models/permissioncreaterequest.md
     - docs/models/permissionresponse.md
@@ -1116,8 +1282,10 @@
     - docs/models/personalaccesstoken.md
     - docs/models/pexelsapi.md
     - docs/models/pgvector.md
+    - docs/models/phyllo.md
     - docs/models/picqer.md
     - docs/models/pinecone.md
+    - docs/models/pingdom.md
     - docs/models/pinterest.md
     - docs/models/pinterestcredentials.md
     - docs/models/pipedrive.md
@@ -1134,6 +1302,7 @@
     - docs/models/pokeapi.md
     - docs/models/pokemonname.md
     - docs/models/polygonstockapi.md
+    - docs/models/poplar.md
     - docs/models/postgres.md
     - docs/models/postgresconnection.md
     - docs/models/posthog.md
@@ -1143,6 +1312,7 @@
     - docs/models/prestashop.md
     - docs/models/pretix.md
     - docs/models/primetric.md
+    - docs/models/printify.md
     - docs/models/privateapp.md
     - docs/models/privatetoken.md
     - docs/models/processing.md
@@ -1151,6 +1321,7 @@
     - docs/models/productcatalog.md
     - docs/models/productive.md
     - docs/models/projectsecret.md
+    - docs/models/protocol.md
     - docs/models/publicpermissiontype.md
     - docs/models/pubsub.md
     - docs/models/pypi.md
@@ -1158,12 +1329,12 @@
     - docs/models/qualaroo.md
     - docs/models/queries.md
     - docs/models/quickbooks.md
+    - docs/models/raas.md
     - docs/models/railz.md
     - docs/models/randomsampling.md
     - docs/models/range.md
     - docs/models/rdstationmarketing.md
     - docs/models/rdstationmarketingauthorization.md
-    - docs/models/readchangesusingbinarylogcdc.md
     - docs/models/readchangesusingchangedatacapturecdc.md
     - docs/models/readchangesusingwriteaheadlogcdc.md
     - docs/models/recharge.md
@@ -1174,34 +1345,46 @@
     - docs/models/redis.md
     - docs/models/redshift.md
     - docs/models/referralhero.md
+    - docs/models/refreshtokenendpoint.md
     - docs/models/region.md
     - docs/models/rentcast.md
     - docs/models/repairshopr.md
     - docs/models/replicaset.md
+    - docs/models/replicatepermissionsacl.md
     - docs/models/replicaterecords.md
     - docs/models/replyio.md
+    - docs/models/reportbasedstreams.md
     - docs/models/reportconfig.md
     - docs/models/reportingdataobject.md
     - docs/models/reportname.md
     - docs/models/reportoptions.md
-    - docs/models/reportrecordtypeenum.md
     - docs/models/require.md
     - docs/models/required.md
+    - docs/models/resolution.md
+    - docs/models/resourcerequirements.md
+    - docs/models/rest.md
+    - docs/models/restapistreams.md
     - docs/models/restcatalog.md
+    - docs/models/retailexpressbymaropost.md
     - docs/models/retently.md
     - docs/models/revenuecat.md
     - docs/models/revolutmerchant.md
+    - docs/models/ringcentral.md
     - docs/models/rkicovid.md
+    - docs/models/rocketchat.md
     - docs/models/rocketlane.md
     - docs/models/rolebasedauthentication.md
     - docs/models/rollbar.md
     - docs/models/rootly.md
+    - docs/models/rowfiltering.md
+    - docs/models/rowfilteringoperation.md
+    - docs/models/rowfilteringoperationtype.md
     - docs/models/rss.md
     - docs/models/ruddr.md
     - docs/models/s3.md
     - docs/models/s3amazonwebservices.md
     - docs/models/s3bucketregion.md
-    - docs/models/s3glue.md
+    - docs/models/s3datalake.md
     - docs/models/safetyculture.md
     - docs/models/sagehr.md
     - docs/models/salesflare.md
@@ -1209,12 +1392,14 @@
     - docs/models/salesloft.md
     - docs/models/sandboxaccesstoken.md
     - docs/models/sapfieldglass.md
+    - docs/models/saphanaenterprise.md
     - docs/models/savvycal.md
     - docs/models/scanchangeswithuserdefinedcursor.md
     - docs/models/scheduletypeenum.md
     - docs/models/scheduletypewithbasicenum.md
     - docs/models/schemebasicauth.md
     - docs/models/schemeclientcredentials.md
+    - docs/models/scopedresourcerequirements.md
     - docs/models/scopetype.md
     - docs/models/scpsecurecopyprotocol.md
     - docs/models/scryfall.md
@@ -1232,31 +1417,38 @@
     - docs/models/sendpulse.md
     - docs/models/senseforce.md
     - docs/models/sentry.md
-    - docs/models/serializationlibrary.md
-    - docs/models/servermanaged.md
+    - docs/models/serpstat.md
     - docs/models/serviceaccount.md
     - docs/models/serviceaccountauthentication.md
     - docs/models/serviceaccountkey.md
     - docs/models/serviceaccountkeyauthentication.md
+    - docs/models/servicedetails.md
     - docs/models/servicekeyauthentication.md
     - docs/models/servicename.md
+    - docs/models/servicenow.md
     - docs/models/sevenshifts.md
     - docs/models/sftp.md
     - docs/models/sftpbulk.md
     - docs/models/sftpjson.md
     - docs/models/sftpsecurefiletransferprotocol.md
+    - docs/models/sharepointenterprise.md
+    - docs/models/sharepointenterprisecredentials.md
     - docs/models/sharetribe.md
     - docs/models/sharetypeusedformostpopularsharedstream.md
     - docs/models/shippo.md
+    - docs/models/shipstation.md
     - docs/models/shopify.md
     - docs/models/shopifyauthorizationmethod.md
     - docs/models/shopifycredentials.md
+    - docs/models/shopwired.md
     - docs/models/shortcut.md
     - docs/models/shortio.md
+    - docs/models/shutterstock.md
     - docs/models/sigmacomputing.md
     - docs/models/signinviagoogleoauth.md
     - docs/models/signinviardstationoauth.md
     - docs/models/signinviaslackoauth.md
+    - docs/models/signnow.md
     - docs/models/silent.md
     - docs/models/simfin.md
     - docs/models/simplecast.md
@@ -1276,10 +1468,10 @@
     - docs/models/snowflake.md
     - docs/models/snowflakeconnection.md
     - docs/models/snowflakecortex.md
-    - docs/models/snowflakecredentials.md
     - docs/models/solarwindsservicedesk.md
     - docs/models/sonarcloud.md
     - docs/models/sortby.md
+    - docs/models/source100ms.md
     - docs/models/source7shifts.md
     - docs/models/sourceactivecampaign.md
     - docs/models/sourceagilecrm.md
@@ -1295,6 +1487,9 @@
     - docs/models/sourceairtableschemasauthmethod.md
     - docs/models/sourceakeneo.md
     - docs/models/sourcealgolia.md
+    - docs/models/sourcealpacabrokerapi.md
+    - docs/models/sourcealpacabrokerapienvironment.md
+    - docs/models/sourcealphavantage.md
     - docs/models/sourceamazonads.md
     - docs/models/sourceamazonadsamazonads.md
     - docs/models/sourceamazonadsauthtype.md
@@ -1309,16 +1504,20 @@
     - docs/models/sourceappfigures.md
     - docs/models/sourceappfollow.md
     - docs/models/sourceapplesearchads.md
+    - docs/models/sourceappsflyer.md
     - docs/models/sourceapptivo.md
     - docs/models/sourceasana.md
     - docs/models/sourceasanaasana.md
     - docs/models/sourceasanacredentialstitle.md
     - docs/models/sourceasanaschemascredentialstitle.md
     - docs/models/sourceashby.md
+    - docs/models/sourceassemblyai.md
     - docs/models/sourceauth0.md
     - docs/models/sourceauth0authenticationmethod.md
     - docs/models/sourceauth0schemasauthenticationmethod.md
     - docs/models/sourceauth0schemascredentialsauthenticationmethod.md
+    - docs/models/sourceaviationstack.md
+    - docs/models/sourceawinadvertiser.md
     - docs/models/sourceawscloudtrail.md
     - docs/models/sourceazureblobstorage.md
     - docs/models/sourceazureblobstorageauthentication.md
@@ -1328,11 +1527,14 @@
     - docs/models/sourceazureblobstorageheaderdefinitiontype.md
     - docs/models/sourceazureblobstoragemode.md
     - docs/models/sourceazureblobstorageschemasauthtype.md
+    - docs/models/sourceazureblobstorageschemascredentialsauthtype.md
     - docs/models/sourceazureblobstorageschemasfiletype.md
     - docs/models/sourceazureblobstorageschemasheaderdefinitiontype.md
     - docs/models/sourceazureblobstorageschemasstreamsfiletype.md
     - docs/models/sourceazureblobstorageschemasstreamsformatfiletype.md
     - docs/models/sourceazuretable.md
+    - docs/models/sourcebabelforce.md
+    - docs/models/sourcebabelforceregion.md
     - docs/models/sourcebamboohr.md
     - docs/models/sourcebasecamp.md
     - docs/models/sourcebeamer.md
@@ -1343,10 +1545,13 @@
     - docs/models/sourcebingadsbingads.md
     - docs/models/sourcebitly.md
     - docs/models/sourceblogger.md
+    - docs/models/sourcebluetally.md
+    - docs/models/sourceboldsign.md
     - docs/models/sourcebox.md
     - docs/models/sourcebraintree.md
     - docs/models/sourcebraintreeenvironment.md
     - docs/models/sourcebraze.md
+    - docs/models/sourcebreezometer.md
     - docs/models/sourcebreezyhr.md
     - docs/models/sourcebrevo.md
     - docs/models/sourcebrex.md
@@ -1361,6 +1566,7 @@
     - docs/models/sourcecampayn.md
     - docs/models/sourcecanny.md
     - docs/models/sourcecapsulecrm.md
+    - docs/models/sourcecaptaindata.md
     - docs/models/sourcecarequalitycommission.md
     - docs/models/sourcecart.md
     - docs/models/sourcecartauthorizationmethod.md
@@ -1372,10 +1578,12 @@
     - docs/models/sourcechargedesk.md
     - docs/models/sourcechargify.md
     - docs/models/sourcechartmogul.md
+    - docs/models/sourcechurnkey.md
     - docs/models/sourcecimis.md
     - docs/models/sourcecin7.md
     - docs/models/sourcecirca.md
     - docs/models/sourcecircleci.md
+    - docs/models/sourceciscomeraki.md
     - docs/models/sourceclarifai.md
     - docs/models/sourceclazar.md
     - docs/models/sourceclickhouse.md
@@ -1396,6 +1604,7 @@
     - docs/models/sourcecoda.md
     - docs/models/sourcecodefresh.md
     - docs/models/sourcecoinapi.md
+    - docs/models/sourcecoingeckocoins.md
     - docs/models/sourcecoinmarketcap.md
     - docs/models/sourceconcord.md
     - docs/models/sourceconcordenvironment.md
@@ -1403,19 +1612,28 @@
     - docs/models/sourceconfiguration.md
     - docs/models/sourceconfluence.md
     - docs/models/sourceconvertkit.md
+    - docs/models/sourceconvertkitauthtype.md
+    - docs/models/sourceconvertkitoauth20.md
+    - docs/models/sourceconvertkitschemasauthtype.md
     - docs/models/sourceconvex.md
     - docs/models/sourceconvexconvex.md
     - docs/models/sourcecopper.md
+    - docs/models/sourcecouchbase.md
     - docs/models/sourcecountercyclical.md
     - docs/models/sourcecreaterequest.md
     - docs/models/sourcecustomerio.md
+    - docs/models/sourcecustomeriocustomerio.md
+    - docs/models/sourcecustomerly.md
     - docs/models/sourcedatadog.md
     - docs/models/sourcedatascope.md
     - docs/models/sourcedbt.md
     - docs/models/sourcedelighted.md
     - docs/models/sourcedeputy.md
+    - docs/models/sourcedingconnect.md
     - docs/models/sourcedixa.md
     - docs/models/sourcedockerhub.md
+    - docs/models/sourcedocuseal.md
+    - docs/models/sourcedolibarr.md
     - docs/models/sourcedremio.md
     - docs/models/sourcedrift.md
     - docs/models/sourcedriftauthorizationmethod.md
@@ -1425,6 +1643,8 @@
     - docs/models/sourcedriftschemascredentials.md
     - docs/models/sourcedrip.md
     - docs/models/sourcedropboxsign.md
+    - docs/models/sourcedwolla.md
+    - docs/models/sourcedwollaenvironment.md
     - docs/models/sourcedynamodb.md
     - docs/models/sourcedynamodbauthtype.md
     - docs/models/sourcedynamodbcredentials.md
@@ -1433,30 +1653,46 @@
     - docs/models/sourcedynamodbschemasauthtype.md
     - docs/models/sourceeasypost.md
     - docs/models/sourceeasypromos.md
+    - docs/models/sourceebayfinance.md
+    - docs/models/sourceebayfulfillment.md
+    - docs/models/sourceebayfulfillmentapihost.md
+    - docs/models/sourceebayfulfillmentrefreshtokenendpoint.md
     - docs/models/sourceeconomic.md
     - docs/models/sourceelasticemail.md
+    - docs/models/sourceelasticsearch.md
+    - docs/models/sourceelasticsearchapikeysecret.md
+    - docs/models/sourceelasticsearchauthenticationmethod.md
+    - docs/models/sourceelasticsearchelasticsearch.md
+    - docs/models/sourceelasticsearchmethod.md
+    - docs/models/sourceelasticsearchnone.md
+    - docs/models/sourceelasticsearchschemasauthenticationmethodmethod.md
+    - docs/models/sourceelasticsearchschemasmethod.md
+    - docs/models/sourceelasticsearchusernamepassword.md
     - docs/models/sourceemailoctopus.md
     - docs/models/sourceemploymenthero.md
     - docs/models/sourceencharge.md
     - docs/models/sourceeventbrite.md
     - docs/models/sourceeventee.md
     - docs/models/sourceeventzilla.md
+    - docs/models/sourceeverhour.md
     - docs/models/sourceexchangerates.md
     - docs/models/sourceezofficeinventory.md
     - docs/models/sourcefacebookmarketing.md
-    - docs/models/sourcefacebookmarketingactionreporttime.md
     - docs/models/sourcefacebookmarketingauthentication.md
     - docs/models/sourcefacebookmarketingauthtype.md
     - docs/models/sourcefacebookmarketingfacebookmarketing.md
     - docs/models/sourcefacebookmarketingschemasauthtype.md
+    - docs/models/sourcefacebookmarketingserviceaccountkeyauthentication.md
     - docs/models/sourcefacebookmarketingvalidenums.md
+    - docs/models/sourcefacebookpages.md
     - docs/models/sourcefactorial.md
     - docs/models/sourcefaker.md
+    - docs/models/sourcefastbill.md
+    - docs/models/sourcefastly.md
     - docs/models/sourcefauna.md
     - docs/models/sourcefaunadeletionmode.md
     - docs/models/sourcefaunaschemasdeletionmode.md
     - docs/models/sourcefile.md
-    - docs/models/sourcefilefileformat.md
     - docs/models/sourcefileschemasproviderstorage.md
     - docs/models/sourcefileschemasproviderstorageprovider6storage.md
     - docs/models/sourcefileschemasproviderstorageprovider7storage.md
@@ -1488,8 +1724,10 @@
     - docs/models/sourcefreshservice.md
     - docs/models/sourcefront.md
     - docs/models/sourcefulcrum.md
+    - docs/models/sourcefullstory.md
     - docs/models/sourcegainsightpx.md
     - docs/models/sourcegcs.md
+    - docs/models/sourcegcsauthenticateviagoogleoauth.md
     - docs/models/sourcegcsauthentication.md
     - docs/models/sourcegcsauthtype.md
     - docs/models/sourcegcsautogenerated.md
@@ -1517,14 +1755,17 @@
     - docs/models/sourcegcsschemasstreamsformatformat6filetype.md
     - docs/models/sourcegcsschemasstreamsformatformatfiletype.md
     - docs/models/sourcegcsschemasstreamsheaderdefinitiontype.md
+    - docs/models/sourcegcsunstructureddocumentformat.md
     - docs/models/sourcegcsuserprovided.md
     - docs/models/sourcegcsvalidationpolicy.md
     - docs/models/sourcegetgist.md
     - docs/models/sourcegetlago.md
+    - docs/models/sourcegiphy.md
     - docs/models/sourcegitbook.md
     - docs/models/sourcegithub.md
     - docs/models/sourcegithubauthentication.md
     - docs/models/sourcegithubgithub.md
+    - docs/models/sourcegithuboauth.md
     - docs/models/sourcegithuboptiontitle.md
     - docs/models/sourcegithubpersonalaccesstoken.md
     - docs/models/sourcegitlab.md
@@ -1541,6 +1782,7 @@
     - docs/models/sourcegnewssortby.md
     - docs/models/sourcegocardless.md
     - docs/models/sourcegoldcast.md
+    - docs/models/sourcegologin.md
     - docs/models/sourcegong.md
     - docs/models/sourcegoogleads.md
     - docs/models/sourcegoogleadsgoogleads.md
@@ -1785,7 +2027,8 @@
     - docs/models/sourcegoogledriveavroformat.md
     - docs/models/sourcegoogledrivecsvformat.md
     - docs/models/sourcegoogledrivecsvheaderdefinition.md
-    - docs/models/sourcegoogledrivedocumentfiletypeformatexperimental.md
+    - docs/models/sourcegoogledrivedeliverytype.md
+    - docs/models/sourcegoogledriveexcelformat.md
     - docs/models/sourcegoogledrivefilebasedstreamconfig.md
     - docs/models/sourcegoogledrivefiletype.md
     - docs/models/sourcegoogledriveformat.md
@@ -1799,18 +2042,22 @@
     - docs/models/sourcegoogledriveparsingstrategy.md
     - docs/models/sourcegoogledriveprocessing.md
     - docs/models/sourcegoogledriveschemasauthtype.md
+    - docs/models/sourcegoogledriveschemasdeliverytype.md
     - docs/models/sourcegoogledriveschemasfiletype.md
     - docs/models/sourcegoogledriveschemasheaderdefinitiontype.md
     - docs/models/sourcegoogledriveschemasstreamsfiletype.md
     - docs/models/sourcegoogledriveschemasstreamsformatfiletype.md
+    - docs/models/sourcegoogledriveschemasstreamsformatformat6filetype.md
     - docs/models/sourcegoogledriveschemasstreamsformatformatfiletype.md
     - docs/models/sourcegoogledriveschemasstreamsheaderdefinitiontype.md
     - docs/models/sourcegoogledriveserviceaccountkeyauthentication.md
+    - docs/models/sourcegoogledriveunstructureddocumentformat.md
     - docs/models/sourcegoogledriveuserprovided.md
     - docs/models/sourcegoogledrivevalidationpolicy.md
     - docs/models/sourcegoogleforms.md
     - docs/models/sourcegooglepagespeedinsights.md
     - docs/models/sourcegooglesearchconsole.md
+    - docs/models/sourcegooglesearchconsoleauthenticationtype.md
     - docs/models/sourcegooglesearchconsoleauthtype.md
     - docs/models/sourcegooglesearchconsolecustomreportconfig.md
     - docs/models/sourcegooglesearchconsolegooglesearchconsole.md
@@ -1829,18 +2076,23 @@
     - docs/models/sourcegooglewebfonts.md
     - docs/models/sourcegorgias.md
     - docs/models/sourcegreenhouse.md
+    - docs/models/sourcegreythr.md
     - docs/models/sourcegridly.md
     - docs/models/sourceguru.md
     - docs/models/sourcegutendex.md
     - docs/models/sourcehardcodedrecords.md
+    - docs/models/sourceharness.md
     - docs/models/sourceharvest.md
     - docs/models/sourceharvestauthenticatewithpersonalaccesstoken.md
     - docs/models/sourceharvestauthenticationmechanism.md
     - docs/models/sourceharvestauthtype.md
     - docs/models/sourceharvestschemasauthtype.md
     - docs/models/sourceheight.md
+    - docs/models/sourcehellobaton.md
+    - docs/models/sourcehelpscout.md
     - docs/models/sourcehibob.md
     - docs/models/sourcehighlevel.md
+    - docs/models/sourcehoorayhr.md
     - docs/models/sourcehubplanner.md
     - docs/models/sourcehubspot.md
     - docs/models/sourcehubspotauthentication.md
@@ -1848,29 +2100,36 @@
     - docs/models/sourcehubspothubspot.md
     - docs/models/sourcehubspotoauth.md
     - docs/models/sourcehubspotschemasauthtype.md
+    - docs/models/sourcehuggingfacedatasets.md
     - docs/models/sourcehumanitix.md
+    - docs/models/sourcehuntr.md
     - docs/models/sourceilluminabasespace.md
+    - docs/models/sourceimagga.md
     - docs/models/sourceincidentio.md
     - docs/models/sourceinflowinventory.md
+    - docs/models/sourceinsightful.md
     - docs/models/sourceinsightly.md
     - docs/models/sourceinstagram.md
     - docs/models/sourceinstagraminstagram.md
     - docs/models/sourceinstatus.md
     - docs/models/sourceintercom.md
-    - docs/models/sourceintercomintercom.md
+    - docs/models/sourceintruder.md
     - docs/models/sourceinvoiced.md
     - docs/models/sourceinvoiceninja.md
     - docs/models/sourceip2whois.md
     - docs/models/sourceiterable.md
+    - docs/models/sourcejamfpro.md
     - docs/models/sourcejira.md
     - docs/models/sourcejobnimbus.md
     - docs/models/sourcejotform.md
     - docs/models/sourcejotformapiendpoint.md
     - docs/models/sourcejotformschemasapiendpoint.md
+    - docs/models/sourcejudgemereviews.md
     - docs/models/sourcejustcall.md
     - docs/models/sourcejustsift.md
     - docs/models/sourcek6cloud.md
     - docs/models/sourcekatana.md
+    - docs/models/sourcekeka.md
     - docs/models/sourcekisi.md
     - docs/models/sourcekissmetrics.md
     - docs/models/sourceklarna.md
@@ -1889,6 +2148,7 @@
     - docs/models/sourceleverhiringleverhiring.md
     - docs/models/sourceleverhiringschemasauthtype.md
     - docs/models/sourcelightspeedretail.md
+    - docs/models/sourcelinear.md
     - docs/models/sourcelinkedinads.md
     - docs/models/sourcelinkedinadsaccesstoken.md
     - docs/models/sourcelinkedinadsauthentication.md
@@ -1908,12 +2168,14 @@
     - docs/models/sourcelooker.md
     - docs/models/sourceluma.md
     - docs/models/sourcemailchimp.md
+    - docs/models/sourcemailchimpapikey.md
     - docs/models/sourcemailchimpauthentication.md
     - docs/models/sourcemailchimpauthtype.md
     - docs/models/sourcemailchimpmailchimp.md
     - docs/models/sourcemailchimpoauth20.md
     - docs/models/sourcemailchimpschemasauthtype.md
     - docs/models/sourcemailerlite.md
+    - docs/models/sourcemailersend.md
     - docs/models/sourcemailgun.md
     - docs/models/sourcemailjetmail.md
     - docs/models/sourcemailjetsms.md
@@ -1921,7 +2183,10 @@
     - docs/models/sourcemailtrap.md
     - docs/models/sourcemarketo.md
     - docs/models/sourcemarketstack.md
+    - docs/models/sourcemendeley.md
     - docs/models/sourcemention.md
+    - docs/models/sourcemercadoads.md
+    - docs/models/sourcemerge.md
     - docs/models/sourcemetabase.md
     - docs/models/sourcemicrosoftdataverse.md
     - docs/models/sourcemicrosoftentraid.md
@@ -1961,8 +2226,11 @@
     - docs/models/sourcemicrosoftsharepointauthtype.md
     - docs/models/sourcemicrosoftsharepointautogenerated.md
     - docs/models/sourcemicrosoftsharepointavroformat.md
+    - docs/models/sourcemicrosoftsharepointcopyrawfiles.md
     - docs/models/sourcemicrosoftsharepointcsvformat.md
     - docs/models/sourcemicrosoftsharepointcsvheaderdefinition.md
+    - docs/models/sourcemicrosoftsharepointdeliverymethod.md
+    - docs/models/sourcemicrosoftsharepointdeliverytype.md
     - docs/models/sourcemicrosoftsharepointexcelformat.md
     - docs/models/sourcemicrosoftsharepointfilebasedstreamconfig.md
     - docs/models/sourcemicrosoftsharepointfiletype.md
@@ -1976,7 +2244,9 @@
     - docs/models/sourcemicrosoftsharepointparquetformat.md
     - docs/models/sourcemicrosoftsharepointparsingstrategy.md
     - docs/models/sourcemicrosoftsharepointprocessing.md
+    - docs/models/sourcemicrosoftsharepointreplicaterecords.md
     - docs/models/sourcemicrosoftsharepointschemasauthtype.md
+    - docs/models/sourcemicrosoftsharepointschemasdeliverytype.md
     - docs/models/sourcemicrosoftsharepointschemasfiletype.md
     - docs/models/sourcemicrosoftsharepointschemasheaderdefinitiontype.md
     - docs/models/sourcemicrosoftsharepointschemasstreamsfiletype.md
@@ -2002,6 +2272,7 @@
     - docs/models/sourcemixpanelregion.md
     - docs/models/sourcemixpanelschemasoptiontitle.md
     - docs/models/sourcemode.md
+    - docs/models/sourcemodemode.md
     - docs/models/sourcemonday.md
     - docs/models/sourcemondayauthorizationmethod.md
     - docs/models/sourcemondayauthtype.md
@@ -2033,30 +2304,54 @@
     - docs/models/sourcemux.md
     - docs/models/sourcemyhours.md
     - docs/models/sourcemysql.md
+    - docs/models/sourcemysqlencryption.md
     - docs/models/sourcemysqlinvalidcdcpositionbehavioradvanced.md
     - docs/models/sourcemysqlmethod.md
     - docs/models/sourcemysqlmode.md
     - docs/models/sourcemysqlmysql.md
     - docs/models/sourcemysqlnotunnel.md
     - docs/models/sourcemysqlpasswordauthentication.md
+    - docs/models/sourcemysqlreadchangesusingchangedatacapturecdc.md
     - docs/models/sourcemysqlscanchangeswithuserdefinedcursor.md
     - docs/models/sourcemysqlschemasmethod.md
     - docs/models/sourcemysqlschemasmode.md
+    - docs/models/sourcemysqlschemassslmodeencryptionmode.md
     - docs/models/sourcemysqlschemassslmodemode.md
-    - docs/models/sourcemysqlschemassslmodesslmodesmode.md
     - docs/models/sourcemysqlschemastunnelmethod.md
     - docs/models/sourcemysqlschemastunnelmethodtunnelmethod.md
     - docs/models/sourcemysqlsshkeyauthentication.md
     - docs/models/sourcemysqlsshtunnelmethod.md
-    - docs/models/sourcemysqlsslmodes.md
     - docs/models/sourcemysqltunnelmethod.md
     - docs/models/sourcemysqlupdatemethod.md
     - docs/models/sourcemysqlverifyca.md
     - docs/models/sourcen8n.md
     - docs/models/sourcenasa.md
+    - docs/models/sourcenavan.md
+    - docs/models/sourcenebiusai.md
     - docs/models/sourcenetsuite.md
+    - docs/models/sourcenetsuiteenterprise.md
+    - docs/models/sourcenetsuiteenterpriseauthenticationmethod.md
+    - docs/models/sourcenetsuiteenterprisenotunnel.md
+    - docs/models/sourcenetsuiteenterprisepasswordauthentication.md
+    - docs/models/sourcenetsuiteenterprisescanchangeswithuserdefinedcursor.md
+    - docs/models/sourcenetsuiteenterpriseschemasauthenticationmethod.md
+    - docs/models/sourcenetsuiteenterpriseschemasauthenticationmethodauthenticationmethod.md
+    - docs/models/sourcenetsuiteenterpriseschemasauthenticationmethodauthenticationmethodauthenticationmethod.md
+    - docs/models/sourcenetsuiteenterpriseschemaspasswordauthentication.md
+    - docs/models/sourcenetsuiteenterpriseschemastunnelmethod.md
+    - docs/models/sourcenetsuiteenterpriseschemastunnelmethodtunnelmethod.md
+    - docs/models/sourcenetsuiteenterprisesshkeyauthentication.md
+    - docs/models/sourcenetsuiteenterprisesshtunnelmethod.md
+    - docs/models/sourcenetsuiteenterprisetunnelmethod.md
+    - docs/models/sourcenetsuiteenterpriseupdatemethod.md
     - docs/models/sourcenewsapi.md
+    - docs/models/sourcenewsdata.md
+    - docs/models/sourcenewsdatacategory.md
+    - docs/models/sourcenewsdatacountry.md
     - docs/models/sourcenewsdataio.md
+    - docs/models/sourcenewsdatalanguage.md
+    - docs/models/sourcenexiopay.md
+    - docs/models/sourceninjaonermm.md
     - docs/models/sourcenocrm.md
     - docs/models/sourcenorthpasslms.md
     - docs/models/sourcenotion.md
@@ -2083,15 +2378,44 @@
     - docs/models/sourceonfleet.md
     - docs/models/sourceopenaq.md
     - docs/models/sourceopendatadc.md
+    - docs/models/sourceopenexchangerates.md
     - docs/models/sourceopenfda.md
     - docs/models/sourceopenweather.md
     - docs/models/sourceopinionstage.md
     - docs/models/sourceopsgenie.md
+    - docs/models/sourceopuswatch.md
     - docs/models/sourceoracle.md
     - docs/models/sourceoracleconnectiontype.md
     - docs/models/sourceoracleencryption.md
     - docs/models/sourceoracleencryptionalgorithm.md
     - docs/models/sourceoracleencryptionmethod.md
+    - docs/models/sourceoracleenterprise.md
+    - docs/models/sourceoracleenterpriseconnectby.md
+    - docs/models/sourceoracleenterpriseconnectiontype.md
+    - docs/models/sourceoracleenterprisecursormethod.md
+    - docs/models/sourceoracleenterpriseencryption.md
+    - docs/models/sourceoracleenterpriseencryptionalgorithm.md
+    - docs/models/sourceoracleenterpriseencryptionmethod.md
+    - docs/models/sourceoracleenterpriseinvalidcdcpositionbehavioradvanced.md
+    - docs/models/sourceoracleenterprisenativenetworkencryptionnne.md
+    - docs/models/sourceoracleenterprisenotunnel.md
+    - docs/models/sourceoracleenterprisepasswordauthentication.md
+    - docs/models/sourceoracleenterprisereadchangesusingchangedatacapturecdc.md
+    - docs/models/sourceoracleenterprisescanchangeswithuserdefinedcursor.md
+    - docs/models/sourceoracleenterpriseschemasconnectiontype.md
+    - docs/models/sourceoracleenterpriseschemascursormethod.md
+    - docs/models/sourceoracleenterpriseschemasencryptionencryptionmethod.md
+    - docs/models/sourceoracleenterpriseschemasencryptionmethod.md
+    - docs/models/sourceoracleenterpriseschemastunnelmethod.md
+    - docs/models/sourceoracleenterpriseschemastunnelmethodtunnelmethod.md
+    - docs/models/sourceoracleenterpriseservicename.md
+    - docs/models/sourceoracleenterprisesshkeyauthentication.md
+    - docs/models/sourceoracleenterprisesshtunnelmethod.md
+    - docs/models/sourceoracleenterprisesystemidsid.md
+    - docs/models/sourceoracleenterprisetlsencryptedverifycertificate.md
+    - docs/models/sourceoracleenterprisetunnelmethod.md
+    - docs/models/sourceoracleenterpriseunencrypted.md
+    - docs/models/sourceoracleenterpriseupdatemethod.md
     - docs/models/sourceoraclenativenetworkencryptionnne.md
     - docs/models/sourceoraclenotunnel.md
     - docs/models/sourceoracleoracle.md
@@ -2106,7 +2430,6 @@
     - docs/models/sourceoracletunnelmethod.md
     - docs/models/sourceoracleunencrypted.md
     - docs/models/sourceorb.md
-    - docs/models/sourceorbit.md
     - docs/models/sourceoura.md
     - docs/models/sourceoutbrainamplify.md
     - docs/models/sourceoutbrainamplifyaccesstoken.md
@@ -2115,19 +2438,29 @@
     - docs/models/sourceoutreach.md
     - docs/models/sourceoveit.md
     - docs/models/sourcepabblysubscriptionsbilling.md
+    - docs/models/sourcepaddle.md
+    - docs/models/sourcepaddleenvironment.md
+    - docs/models/sourcepagerduty.md
     - docs/models/sourcepandadoc.md
     - docs/models/sourcepaperform.md
     - docs/models/sourcepapersign.md
     - docs/models/sourcepardot.md
+    - docs/models/sourcepartnerize.md
+    - docs/models/sourcepartnerstack.md
     - docs/models/sourcepatchrequest.md
+    - docs/models/sourcepayfit.md
     - docs/models/sourcepaypaltransaction.md
     - docs/models/sourcepaystack.md
     - docs/models/sourcependo.md
     - docs/models/sourcepennylane.md
+    - docs/models/sourceperigon.md
     - docs/models/sourcepersistiq.md
     - docs/models/sourcepersona.md
     - docs/models/sourcepexelsapi.md
+    - docs/models/sourcephyllo.md
+    - docs/models/sourcephylloenvironment.md
     - docs/models/sourcepicqer.md
+    - docs/models/sourcepingdom.md
     - docs/models/sourcepinterest.md
     - docs/models/sourcepinterestauthmethod.md
     - docs/models/sourcepinterestlevel.md
@@ -2145,6 +2478,7 @@
     - docs/models/sourcepocketsortby.md
     - docs/models/sourcepokeapi.md
     - docs/models/sourcepolygonstockapi.md
+    - docs/models/sourcepoplar.md
     - docs/models/sourcepostgres.md
     - docs/models/sourcepostgresallow.md
     - docs/models/sourcepostgresdisable.md
@@ -2178,15 +2512,14 @@
     - docs/models/sourceprestashop.md
     - docs/models/sourcepretix.md
     - docs/models/sourceprimetric.md
+    - docs/models/sourceprintify.md
     - docs/models/sourceproductboard.md
     - docs/models/sourceproductive.md
     - docs/models/sourceputrequest.md
     - docs/models/sourcepypi.md
     - docs/models/sourcequalaroo.md
     - docs/models/sourcequickbooks.md
-    - docs/models/sourcequickbooksauthorizationmethod.md
     - docs/models/sourcequickbooksauthtype.md
-    - docs/models/sourcequickbooksoauth20.md
     - docs/models/sourcerailz.md
     - docs/models/sourcerdstationmarketing.md
     - docs/models/sourcerdstationmarketingauthenticationtype.md
@@ -2204,6 +2537,7 @@
     - docs/models/sourcerepairshopr.md
     - docs/models/sourcereplyio.md
     - docs/models/sourceresponse.md
+    - docs/models/sourceretailexpressbymaropost.md
     - docs/models/sourceretently.md
     - docs/models/sourceretentlyauthenticationmechanism.md
     - docs/models/sourceretentlyauthtype.md
@@ -2211,7 +2545,9 @@
     - docs/models/sourcerevenuecat.md
     - docs/models/sourcerevolutmerchant.md
     - docs/models/sourcerevolutmerchantenvironment.md
+    - docs/models/sourceringcentral.md
     - docs/models/sourcerkicovid.md
+    - docs/models/sourcerocketchat.md
     - docs/models/sourcerocketlane.md
     - docs/models/sourcerollbar.md
     - docs/models/sourcerootly.md
@@ -2220,8 +2556,10 @@
     - docs/models/sources3.md
     - docs/models/sources3autogenerated.md
     - docs/models/sources3avroformat.md
+    - docs/models/sources3copyrawfiles.md
     - docs/models/sources3csvformat.md
     - docs/models/sources3csvheaderdefinition.md
+    - docs/models/sources3deliverymethod.md
     - docs/models/sources3deliverytype.md
     - docs/models/sources3excelformat.md
     - docs/models/sources3filebasedstreamconfig.md
@@ -2235,7 +2573,9 @@
     - docs/models/sources3parquetformat.md
     - docs/models/sources3parsingstrategy.md
     - docs/models/sources3processing.md
+    - docs/models/sources3replicaterecords.md
     - docs/models/sources3s3.md
+    - docs/models/sources3schemasdeliverytype.md
     - docs/models/sources3schemasfiletype.md
     - docs/models/sources3schemasheaderdefinitiontype.md
     - docs/models/sources3schemasstreamsfiletype.md
@@ -2250,12 +2590,35 @@
     - docs/models/sourcesagehr.md
     - docs/models/sourcesalesflare.md
     - docs/models/sourcesalesforce.md
+    - docs/models/sourcesalesforceauthtype.md
     - docs/models/sourcesalesforcesalesforce.md
     - docs/models/sourcesalesloft.md
     - docs/models/sourcesalesloftauthtype.md
     - docs/models/sourcesalesloftcredentials.md
     - docs/models/sourcesalesloftschemasauthtype.md
     - docs/models/sourcesapfieldglass.md
+    - docs/models/sourcesaphanaenterprise.md
+    - docs/models/sourcesaphanaenterprisecursormethod.md
+    - docs/models/sourcesaphanaenterpriseencryption.md
+    - docs/models/sourcesaphanaenterpriseencryptionalgorithm.md
+    - docs/models/sourcesaphanaenterpriseencryptionmethod.md
+    - docs/models/sourcesaphanaenterpriseinvalidcdcpositionbehavioradvanced.md
+    - docs/models/sourcesaphanaenterprisenativenetworkencryptionnne.md
+    - docs/models/sourcesaphanaenterprisenotunnel.md
+    - docs/models/sourcesaphanaenterprisepasswordauthentication.md
+    - docs/models/sourcesaphanaenterprisereadchangesusingchangedatacapturecdc.md
+    - docs/models/sourcesaphanaenterprisescanchangeswithuserdefinedcursor.md
+    - docs/models/sourcesaphanaenterpriseschemascursormethod.md
+    - docs/models/sourcesaphanaenterpriseschemasencryptionencryptionmethod.md
+    - docs/models/sourcesaphanaenterpriseschemasencryptionmethod.md
+    - docs/models/sourcesaphanaenterpriseschemastunnelmethod.md
+    - docs/models/sourcesaphanaenterpriseschemastunnelmethodtunnelmethod.md
+    - docs/models/sourcesaphanaenterprisesshkeyauthentication.md
+    - docs/models/sourcesaphanaenterprisesshtunnelmethod.md
+    - docs/models/sourcesaphanaenterprisetlsencryptedverifycertificate.md
+    - docs/models/sourcesaphanaenterprisetunnelmethod.md
+    - docs/models/sourcesaphanaenterpriseunencrypted.md
+    - docs/models/sourcesaphanaenterpriseupdatemethod.md
     - docs/models/sourcesavvycal.md
     - docs/models/sourcescryfall.md
     - docs/models/sourcesecoda.md
@@ -2266,6 +2629,8 @@
     - docs/models/sourcesendpulse.md
     - docs/models/sourcesenseforce.md
     - docs/models/sourcesentry.md
+    - docs/models/sourceserpstat.md
+    - docs/models/sourceservicenow.md
     - docs/models/sourcesftp.md
     - docs/models/sourcesftpauthentication.md
     - docs/models/sourcesftpauthmethod.md
@@ -2310,16 +2675,61 @@
     - docs/models/sourcesftppasswordauthentication.md
     - docs/models/sourcesftpschemasauthmethod.md
     - docs/models/sourcesftpsshkeyauthentication.md
+    - docs/models/sourcesharepointenterprise.md
+    - docs/models/sourcesharepointenterpriseauthenticateviamicrosoftoauth.md
+    - docs/models/sourcesharepointenterpriseauthentication.md
+    - docs/models/sourcesharepointenterpriseauthtype.md
+    - docs/models/sourcesharepointenterpriseautogenerated.md
+    - docs/models/sourcesharepointenterpriseavroformat.md
+    - docs/models/sourcesharepointenterprisecopyrawfiles.md
+    - docs/models/sourcesharepointenterprisecsvformat.md
+    - docs/models/sourcesharepointenterprisecsvheaderdefinition.md
+    - docs/models/sourcesharepointenterprisedeliverymethod.md
+    - docs/models/sourcesharepointenterprisedeliverytype.md
+    - docs/models/sourcesharepointenterpriseexcelformat.md
+    - docs/models/sourcesharepointenterprisefilebasedstreamconfig.md
+    - docs/models/sourcesharepointenterprisefiletype.md
+    - docs/models/sourcesharepointenterpriseformat.md
+    - docs/models/sourcesharepointenterprisefromcsv.md
+    - docs/models/sourcesharepointenterpriseheaderdefinitiontype.md
+    - docs/models/sourcesharepointenterprisejsonlformat.md
+    - docs/models/sourcesharepointenterpriselocal.md
+    - docs/models/sourcesharepointenterprisemode.md
+    - docs/models/sourcesharepointenterpriseparquetformat.md
+    - docs/models/sourcesharepointenterpriseparsingstrategy.md
+    - docs/models/sourcesharepointenterpriseprocessing.md
+    - docs/models/sourcesharepointenterprisereplicatepermissionsacl.md
+    - docs/models/sourcesharepointenterprisereplicaterecords.md
+    - docs/models/sourcesharepointenterpriseschemasauthtype.md
+    - docs/models/sourcesharepointenterpriseschemasdeliverymethoddeliverytype.md
+    - docs/models/sourcesharepointenterpriseschemasdeliverytype.md
+    - docs/models/sourcesharepointenterpriseschemasfiletype.md
+    - docs/models/sourcesharepointenterpriseschemasheaderdefinitiontype.md
+    - docs/models/sourcesharepointenterpriseschemasstreamsfiletype.md
+    - docs/models/sourcesharepointenterpriseschemasstreamsformatfiletype.md
+    - docs/models/sourcesharepointenterpriseschemasstreamsformatformat6filetype.md
+    - docs/models/sourcesharepointenterpriseschemasstreamsformatformatfiletype.md
+    - docs/models/sourcesharepointenterpriseschemasstreamsheaderdefinitiontype.md
+    - docs/models/sourcesharepointenterprisesearchscope.md
+    - docs/models/sourcesharepointenterpriseservicekeyauthentication.md
+    - docs/models/sourcesharepointenterprisesharepointenterprise.md
+    - docs/models/sourcesharepointenterpriseunstructureddocumentformat.md
+    - docs/models/sourcesharepointenterpriseuserprovided.md
+    - docs/models/sourcesharepointenterprisevalidationpolicy.md
     - docs/models/sourcesharetribe.md
     - docs/models/sourceshippo.md
+    - docs/models/sourceshipstation.md
     - docs/models/sourceshopify.md
     - docs/models/sourceshopifyauthmethod.md
     - docs/models/sourceshopifyoauth20.md
     - docs/models/sourceshopifyschemasauthmethod.md
     - docs/models/sourceshopifyshopify.md
+    - docs/models/sourceshopwired.md
     - docs/models/sourceshortcut.md
     - docs/models/sourceshortio.md
+    - docs/models/sourceshutterstock.md
     - docs/models/sourcesigmacomputing.md
+    - docs/models/sourcesignnow.md
     - docs/models/sourcesimfin.md
     - docs/models/sourcesimplecast.md
     - docs/models/sourcesimplesat.md
@@ -2344,17 +2754,19 @@
     - docs/models/sourcesnowflake.md
     - docs/models/sourcesnowflakeauthorizationmethod.md
     - docs/models/sourcesnowflakeauthtype.md
+    - docs/models/sourcesnowflakecursormethod.md
     - docs/models/sourcesnowflakekeypairauthentication.md
-    - docs/models/sourcesnowflakeoauth20.md
+    - docs/models/sourcesnowflakescanchangeswithuserdefinedcursor.md
     - docs/models/sourcesnowflakeschemasauthtype.md
-    - docs/models/sourcesnowflakeschemascredentialsauthtype.md
     - docs/models/sourcesnowflakesnowflake.md
+    - docs/models/sourcesnowflakeupdatemethod.md
     - docs/models/sourcesnowflakeusernameandpassword.md
     - docs/models/sourcesolarwindsservicedesk.md
     - docs/models/sourcesonarcloud.md
     - docs/models/sourcespacexapi.md
     - docs/models/sourcesparkpost.md
     - docs/models/sourcesplitio.md
+    - docs/models/sourcespotifyads.md
     - docs/models/sourcespotlercrm.md
     - docs/models/sourcesquare.md
     - docs/models/sourcesquareapikey.md
@@ -2375,14 +2787,18 @@
     - docs/models/sourcesurveysparrow.md
     - docs/models/sourcesurveysparrowurlbase.md
     - docs/models/sourcesurvicate.md
+    - docs/models/sourcesvix.md
     - docs/models/sourcesysteme.md
     - docs/models/sourcetaboola.md
+    - docs/models/sourcetavus.md
     - docs/models/sourceteamtailor.md
     - docs/models/sourceteamwork.md
     - docs/models/sourcetempo.md
     - docs/models/sourcetestrail.md
     - docs/models/sourcetheguardianapi.md
     - docs/models/sourcethinkific.md
+    - docs/models/sourcethinkificcourses.md
+    - docs/models/sourcethrivelearning.md
     - docs/models/sourceticketmaster.md
     - docs/models/sourcetickettailor.md
     - docs/models/sourcetiktokmarketing.md
@@ -2393,7 +2809,9 @@
     - docs/models/sourcetiktokmarketingtiktokmarketing.md
     - docs/models/sourcetimely.md
     - docs/models/sourcetinyemail.md
+    - docs/models/sourcetmdb.md
     - docs/models/sourcetodoist.md
+    - docs/models/sourcetoggl.md
     - docs/models/sourcetrackpms.md
     - docs/models/sourcetrello.md
     - docs/models/sourcetremendous.md
@@ -2406,9 +2824,11 @@
     - docs/models/sourcetrustpilotschemasauthtype.md
     - docs/models/sourcetvmazeschedule.md
     - docs/models/sourcetwelvedata.md
+    - docs/models/sourcetwelvedatainterval.md
     - docs/models/sourcetwilio.md
     - docs/models/sourcetwiliotaskrouter.md
     - docs/models/sourcetwitter.md
+    - docs/models/sourcetyntecsms.md
     - docs/models/sourcetypeform.md
     -
docs/models/sourcetypeformauthorizationmethod.md - docs/models/sourcetypeformauthtype.md @@ -2419,6 +2839,7 @@ generatedFiles: - docs/models/sourceubidots.md - docs/models/sourceunleash.md - docs/models/sourceuppromote.md + - docs/models/sourceuptick.md - docs/models/sourceuscensus.md - docs/models/sourceuservoice.md - docs/models/sourcevantage.md @@ -2430,6 +2851,7 @@ generatedFiles: - docs/models/sourcevwo.md - docs/models/sourcewaiteraid.md - docs/models/sourcewasabistatsapi.md + - docs/models/sourcewatchmode.md - docs/models/sourceweatherstack.md - docs/models/sourcewebflow.md - docs/models/sourcewebscrapper.md @@ -2439,6 +2861,8 @@ generatedFiles: - docs/models/sourcewoocommerce.md - docs/models/sourcewordpress.md - docs/models/sourceworkable.md + - docs/models/sourceworkday.md + - docs/models/sourceworkdayauthentication.md - docs/models/sourceworkflowmax.md - docs/models/sourceworkramp.md - docs/models/sourcewrike.md @@ -2450,17 +2874,20 @@ generatedFiles: - docs/models/sourceyandexmetrica.md - docs/models/sourceyotpo.md - docs/models/sourceyouneedabudgetynab.md + - docs/models/sourceyounium.md + - docs/models/sourceyousign.md + - docs/models/sourceyousignsubdomain.md - docs/models/sourceyoutubeanalytics.md - docs/models/sourceyoutubeanalyticsyoutubeanalytics.md - docs/models/sourceyoutubedata.md - docs/models/sourcezapiersupportedstorage.md + - docs/models/sourcezapsign.md - docs/models/sourcezendeskchat.md - docs/models/sourcezendeskchataccesstoken.md - docs/models/sourcezendeskchatauthorizationmethod.md - docs/models/sourcezendeskchatcredentials.md - docs/models/sourcezendeskchatoauth20.md - docs/models/sourcezendeskchatschemascredentials.md - - docs/models/sourcezendeskchatzendeskchat.md - docs/models/sourcezendesksunshine.md - docs/models/sourcezendesksunshineapitoken.md - docs/models/sourcezendesksunshineauthmethod.md @@ -2506,6 +2933,7 @@ generatedFiles: - docs/models/spacexapi.md - docs/models/sparkpost.md - docs/models/splitio.md + - docs/models/spotifyads.md - docs/models/spotlercrm.md - docs/models/sqlinserts.md - docs/models/square.md @@ -2517,35 +2945,46 @@ generatedFiles: - docs/models/sslmodes.md - docs/models/standalonemongodbinstance.md - docs/models/state.md - - docs/models/statefilterenum.md - docs/models/statisticsinterval.md - docs/models/statsig.md - docs/models/status.md - docs/models/statuspage.md - docs/models/stockdata.md - docs/models/storage.md - - docs/models/storageconfig.md - docs/models/storageprovider.md - docs/models/storagetype.md - docs/models/strategies.md - docs/models/strava.md - docs/models/streamconfiguration.md - docs/models/streamconfigurations.md + - docs/models/streamconfigurationsinput.md - docs/models/streammappertype.md + - docs/models/streamnameoverrides.md - docs/models/streamproperties.md - docs/models/streamscriteria.md - docs/models/stringfilter.md - docs/models/stripe.md + - docs/models/subdomain.md + - docs/models/subtitleformat.md + - docs/models/surrealdb.md - docs/models/surveymonkey.md - docs/models/surveymonkeyauthorizationmethod.md - docs/models/surveymonkeycredentials.md - docs/models/surveysparrow.md - docs/models/survicate.md + - docs/models/svix.md - docs/models/swipeupattributionwindow.md - docs/models/systeme.md - docs/models/systemidsid.md - docs/models/taboola.md + - docs/models/tag.md + - docs/models/tagcreaterequest.md + - docs/models/tagpatchrequest.md + - docs/models/tagresponse.md + - docs/models/tagsresponse.md - docs/models/targetstype.md + - docs/models/tavus.md + - docs/models/td2.md - 
docs/models/teamtailor.md - docs/models/teamwork.md - docs/models/technicalindicatortype.md @@ -2556,7 +2995,10 @@ generatedFiles: - docs/models/testrail.md - docs/models/textsplitter.md - docs/models/theguardianapi.md + - docs/models/thetargetedactionresourceforthefetch.md - docs/models/thinkific.md + - docs/models/thinkificcourses.md + - docs/models/thrivelearning.md - docs/models/throttled.md - docs/models/ticketmaster.md - docs/models/tickettailor.md @@ -2570,13 +3012,16 @@ generatedFiles: - docs/models/timely.md - docs/models/timeperiod.md - docs/models/timeplus.md + - docs/models/timezone.md - docs/models/tinyemail.md - docs/models/tlsencryptedverifycertificate.md + - docs/models/tmdb.md - docs/models/todoist.md + - docs/models/toggl.md + - docs/models/tokenbasedauthentication.md - docs/models/topheadlinestopic.md - docs/models/tovalue.md - docs/models/trackpms.md - - docs/models/transformationqueryruntype.md - docs/models/trello.md - docs/models/tremendous.md - docs/models/trustpilot.md @@ -2586,6 +3031,8 @@ generatedFiles: - docs/models/twilio.md - docs/models/twiliotaskrouter.md - docs/models/twitter.md + - docs/models/tyntecsms.md + - docs/models/type.md - docs/models/typeform.md - docs/models/typeformcredentials.md - docs/models/typesense.md @@ -2595,9 +3042,12 @@ generatedFiles: - docs/models/units.md - docs/models/unleash.md - docs/models/unstructureddocumentformat.md + - docs/models/updatedeclarativesourcedefinitionrequest.md + - docs/models/updatedefinitionrequest.md - docs/models/updatemethod.md - docs/models/uploadingmethod.md - docs/models/uppromote.md + - docs/models/uptick.md - docs/models/urlbase.md - docs/models/urlregion.md - docs/models/uscensus.md @@ -2631,9 +3081,11 @@ generatedFiles: - docs/models/vwo.md - docs/models/waiteraid.md - docs/models/wasabistatsapi.md + - docs/models/watchmode.md - docs/models/weatherstack.md - docs/models/weaviate.md - docs/models/webflow.md + - docs/models/webhooknotificationconfig.md - docs/models/webscrapper.md - docs/models/wheniwork.md - docs/models/whiskyhunter.md @@ -2641,6 +3093,7 @@ generatedFiles: - docs/models/woocommerce.md - docs/models/wordpress.md - docs/models/workable.md + - docs/models/workday.md - docs/models/workflowmax.md - docs/models/workramp.md - docs/models/workspacecreaterequest.md @@ -2658,12 +3111,14 @@ generatedFiles: - docs/models/yellowbrick.md - docs/models/yotpo.md - docs/models/youneedabudgetynab.md + - docs/models/younium.md + - docs/models/yousign.md - docs/models/youtubeanalytics.md - docs/models/youtubeanalyticscredentials.md - docs/models/youtubedata.md - docs/models/zapiersupportedstorage.md + - docs/models/zapsign.md - docs/models/zendeskchat.md - - docs/models/zendeskchatcredentials.md - docs/models/zendesksunshine.md - docs/models/zendesksupport.md - docs/models/zendesksupportcredentials.md @@ -2687,13 +3142,17 @@ generatedFiles: - docs/models/zstandard.md - docs/sdks/airbyteapi/README.md - docs/sdks/connections/README.md + - docs/sdks/declarativesourcedefinitions/README.md + - docs/sdks/destinationdefinitions/README.md - docs/sdks/destinations/README.md - docs/sdks/health/README.md - docs/sdks/jobs/README.md - docs/sdks/organizations/README.md - docs/sdks/permissions/README.md + - docs/sdks/sourcedefinitions/README.md - docs/sdks/sources/README.md - docs/sdks/streams/README.md + - docs/sdks/tags/README.md - docs/sdks/users/README.md - docs/sdks/workspaces/README.md - py.typed @@ -2708,32 +3167,49 @@ generatedFiles: - src/airbyte_api/api/__init__.py - src/airbyte_api/api/canceljob.py - 
src/airbyte_api/api/createconnection.py + - src/airbyte_api/api/createdeclarativesourcedefinition.py - src/airbyte_api/api/createdestination.py + - src/airbyte_api/api/createdestinationdefinition.py - src/airbyte_api/api/createjob.py + - src/airbyte_api/api/createorupdateorganizationoauthcredentials.py - src/airbyte_api/api/createorupdateworkspaceoauthcredentials.py - src/airbyte_api/api/createpermission.py - src/airbyte_api/api/createsource.py + - src/airbyte_api/api/createsourcedefinition.py + - src/airbyte_api/api/createtag.py - src/airbyte_api/api/createworkspace.py - src/airbyte_api/api/deleteconnection.py + - src/airbyte_api/api/deletedeclarativesourcedefinition.py - src/airbyte_api/api/deletedestination.py + - src/airbyte_api/api/deletedestinationdefinition.py - src/airbyte_api/api/deletepermission.py - src/airbyte_api/api/deletesource.py + - src/airbyte_api/api/deletesourcedefinition.py + - src/airbyte_api/api/deletetag.py - src/airbyte_api/api/deleteworkspace.py - src/airbyte_api/api/getconnection.py + - src/airbyte_api/api/getdeclarativesourcedefinition.py - src/airbyte_api/api/getdestination.py + - src/airbyte_api/api/getdestinationdefinition.py - src/airbyte_api/api/gethealthcheck.py - src/airbyte_api/api/getjob.py - src/airbyte_api/api/getpermission.py - src/airbyte_api/api/getsource.py + - src/airbyte_api/api/getsourcedefinition.py - src/airbyte_api/api/getstreamproperties.py + - src/airbyte_api/api/gettag.py - src/airbyte_api/api/getworkspace.py - src/airbyte_api/api/initiateoauth.py - src/airbyte_api/api/listconnections.py + - src/airbyte_api/api/listdeclarativesourcedefinitions.py + - src/airbyte_api/api/listdestinationdefinitions.py - src/airbyte_api/api/listdestinations.py - src/airbyte_api/api/listjobs.py - src/airbyte_api/api/listorganizationsforuser.py - src/airbyte_api/api/listpermissions.py + - src/airbyte_api/api/listsourcedefinitions.py - src/airbyte_api/api/listsources.py + - src/airbyte_api/api/listtags.py - src/airbyte_api/api/listuserswithinanorganization.py - src/airbyte_api/api/listworkspaces.py - src/airbyte_api/api/patchconnection.py @@ -2741,9 +3217,15 @@ generatedFiles: - src/airbyte_api/api/patchsource.py - src/airbyte_api/api/putdestination.py - src/airbyte_api/api/putsource.py + - src/airbyte_api/api/updatedeclarativesourcedefinition.py + - src/airbyte_api/api/updatedestinationdefinition.py - src/airbyte_api/api/updatepermission.py + - src/airbyte_api/api/updatesourcedefinition.py + - src/airbyte_api/api/updatetag.py - src/airbyte_api/api/updateworkspace.py - src/airbyte_api/connections.py + - src/airbyte_api/declarativesourcedefinitions.py + - src/airbyte_api/destinationdefinitions.py - src/airbyte_api/destinations.py - src/airbyte_api/errors/__init__.py - src/airbyte_api/errors/sdkerror.py @@ -2766,13 +3248,21 @@ generatedFiles: - src/airbyte_api/models/connectionsresponse.py - src/airbyte_api/models/connectionstatusenum.py - src/airbyte_api/models/connectionsyncmodeenum.py + - src/airbyte_api/models/createdeclarativesourcedefinitionrequest.py + - src/airbyte_api/models/createdefinitionrequest.py + - src/airbyte_api/models/declarativesourcedefinitionresponse.py + - src/airbyte_api/models/declarativesourcedefinitionsresponse.py + - src/airbyte_api/models/definitionresponse.py + - src/airbyte_api/models/definitionsresponse.py - src/airbyte_api/models/destination_astra.py - src/airbyte_api/models/destination_aws_datalake.py - src/airbyte_api/models/destination_azure_blob_storage.py - src/airbyte_api/models/destination_bigquery.py - 
src/airbyte_api/models/destination_clickhouse.py - src/airbyte_api/models/destination_convex.py + - src/airbyte_api/models/destination_customer_io.py - src/airbyte_api/models/destination_databricks.py + - src/airbyte_api/models/destination_deepset.py - src/airbyte_api/models/destination_dev_null.py - src/airbyte_api/models/destination_duckdb.py - src/airbyte_api/models/destination_dynamodb.py @@ -2781,11 +3271,12 @@ generatedFiles: - src/airbyte_api/models/destination_firestore.py - src/airbyte_api/models/destination_gcs.py - src/airbyte_api/models/destination_google_sheets.py - - src/airbyte_api/models/destination_iceberg.py + - src/airbyte_api/models/destination_hubspot.py - src/airbyte_api/models/destination_milvus.py - src/airbyte_api/models/destination_mongodb.py - src/airbyte_api/models/destination_motherduck.py - src/airbyte_api/models/destination_mssql.py + - src/airbyte_api/models/destination_mssql_v2.py - src/airbyte_api/models/destination_mysql.py - src/airbyte_api/models/destination_oracle.py - src/airbyte_api/models/destination_pgvector.py @@ -2796,10 +3287,12 @@ generatedFiles: - src/airbyte_api/models/destination_redis.py - src/airbyte_api/models/destination_redshift.py - src/airbyte_api/models/destination_s3.py - - src/airbyte_api/models/destination_s3_glue.py + - src/airbyte_api/models/destination_s3_data_lake.py + - src/airbyte_api/models/destination_salesforce.py - src/airbyte_api/models/destination_sftp_json.py - src/airbyte_api/models/destination_snowflake.py - src/airbyte_api/models/destination_snowflake_cortex.py + - src/airbyte_api/models/destination_surrealdb.py - src/airbyte_api/models/destination_teradata.py - src/airbyte_api/models/destination_timeplus.py - src/airbyte_api/models/destination_typesense.py @@ -2813,10 +3306,10 @@ generatedFiles: - src/airbyte_api/models/destinationresponse.py - src/airbyte_api/models/destinationsresponse.py - src/airbyte_api/models/drift.py + - src/airbyte_api/models/emailnotificationconfig.py + - src/airbyte_api/models/encryptionmapperalgorithm.py - src/airbyte_api/models/facebook_marketing.py - src/airbyte_api/models/gcs.py - - src/airbyte_api/models/geographyenum.py - - src/airbyte_api/models/geographyenumnodefault.py - src/airbyte_api/models/github.py - src/airbyte_api/models/gitlab.py - src/airbyte_api/models/google_ads.py @@ -2827,12 +3320,13 @@ generatedFiles: - src/airbyte_api/models/hubspot.py - src/airbyte_api/models/initiateoauthrequest.py - src/airbyte_api/models/instagram.py - - src/airbyte_api/models/intercom.py - src/airbyte_api/models/jobcreaterequest.py - src/airbyte_api/models/jobresponse.py - src/airbyte_api/models/jobsresponse.py - src/airbyte_api/models/jobstatusenum.py + - src/airbyte_api/models/jobtype.py - src/airbyte_api/models/jobtypeenum.py + - src/airbyte_api/models/jobtyperesourcelimit.py - src/airbyte_api/models/lever_hiring.py - src/airbyte_api/models/linkedin_ads.py - src/airbyte_api/models/mailchimp.py @@ -2845,9 +3339,11 @@ generatedFiles: - src/airbyte_api/models/namespacedefinitionenumnodefault.py - src/airbyte_api/models/nonbreakingschemaupdatesbehaviorenum.py - src/airbyte_api/models/nonbreakingschemaupdatesbehaviorenumnodefault.py + - src/airbyte_api/models/notificationconfig.py + - src/airbyte_api/models/notificationsconfig.py - src/airbyte_api/models/notion.py - src/airbyte_api/models/oauthactornames.py - - src/airbyte_api/models/oauthcredentialsconfiguration.py + - src/airbyte_api/models/organizationoauthcredentialsrequest.py - src/airbyte_api/models/organizationresponse.py - 
src/airbyte_api/models/organizationsresponse.py - src/airbyte_api/models/permissioncreaterequest.py @@ -2860,18 +3356,23 @@ generatedFiles: - src/airbyte_api/models/pinterest.py - src/airbyte_api/models/publicpermissiontype.py - src/airbyte_api/models/rd_station_marketing.py + - src/airbyte_api/models/resourcerequirements.py + - src/airbyte_api/models/rowfilteringoperation.py + - src/airbyte_api/models/rowfilteringoperationtype.py - src/airbyte_api/models/salesforce.py - src/airbyte_api/models/scheduletypeenum.py - src/airbyte_api/models/scheduletypewithbasicenum.py - src/airbyte_api/models/schemebasicauth.py - src/airbyte_api/models/schemeclientcredentials.py + - src/airbyte_api/models/scopedresourcerequirements.py - src/airbyte_api/models/security.py - src/airbyte_api/models/selectedfieldinfo.py + - src/airbyte_api/models/sharepoint_enterprise.py - src/airbyte_api/models/shopify.py - src/airbyte_api/models/slack.py - src/airbyte_api/models/smartsheets.py - src/airbyte_api/models/snapchat_marketing.py - - src/airbyte_api/models/snowflake.py + - src/airbyte_api/models/source_100ms.py - src/airbyte_api/models/source_7shifts.py - src/airbyte_api/models/source_activecampaign.py - src/airbyte_api/models/source_agilecrm.py @@ -2881,6 +3382,8 @@ generatedFiles: - src/airbyte_api/models/source_airtable.py - src/airbyte_api/models/source_akeneo.py - src/airbyte_api/models/source_algolia.py + - src/airbyte_api/models/source_alpaca_broker_api.py + - src/airbyte_api/models/source_alpha_vantage.py - src/airbyte_api/models/source_amazon_ads.py - src/airbyte_api/models/source_amazon_seller_partner.py - src/airbyte_api/models/source_amazon_sqs.py @@ -2890,13 +3393,18 @@ generatedFiles: - src/airbyte_api/models/source_appfigures.py - src/airbyte_api/models/source_appfollow.py - src/airbyte_api/models/source_apple_search_ads.py + - src/airbyte_api/models/source_appsflyer.py - src/airbyte_api/models/source_apptivo.py - src/airbyte_api/models/source_asana.py - src/airbyte_api/models/source_ashby.py + - src/airbyte_api/models/source_assemblyai.py - src/airbyte_api/models/source_auth0.py + - src/airbyte_api/models/source_aviationstack.py + - src/airbyte_api/models/source_awin_advertiser.py - src/airbyte_api/models/source_aws_cloudtrail.py - src/airbyte_api/models/source_azure_blob_storage.py - src/airbyte_api/models/source_azure_table.py + - src/airbyte_api/models/source_babelforce.py - src/airbyte_api/models/source_bamboo_hr.py - src/airbyte_api/models/source_basecamp.py - src/airbyte_api/models/source_beamer.py @@ -2905,9 +3413,12 @@ generatedFiles: - src/airbyte_api/models/source_bing_ads.py - src/airbyte_api/models/source_bitly.py - src/airbyte_api/models/source_blogger.py + - src/airbyte_api/models/source_bluetally.py + - src/airbyte_api/models/source_boldsign.py - src/airbyte_api/models/source_box.py - src/airbyte_api/models/source_braintree.py - src/airbyte_api/models/source_braze.py + - src/airbyte_api/models/source_breezometer.py - src/airbyte_api/models/source_breezy_hr.py - src/airbyte_api/models/source_brevo.py - src/airbyte_api/models/source_brex.py @@ -2922,6 +3433,7 @@ generatedFiles: - src/airbyte_api/models/source_campayn.py - src/airbyte_api/models/source_canny.py - src/airbyte_api/models/source_capsule_crm.py + - src/airbyte_api/models/source_captain_data.py - src/airbyte_api/models/source_care_quality_commission.py - src/airbyte_api/models/source_cart.py - src/airbyte_api/models/source_castor_edc.py @@ -2930,10 +3442,12 @@ generatedFiles: - src/airbyte_api/models/source_chargedesk.py - 
src/airbyte_api/models/source_chargify.py - src/airbyte_api/models/source_chartmogul.py + - src/airbyte_api/models/source_churnkey.py - src/airbyte_api/models/source_cimis.py - src/airbyte_api/models/source_cin7.py - src/airbyte_api/models/source_circa.py - src/airbyte_api/models/source_circleci.py + - src/airbyte_api/models/source_cisco_meraki.py - src/airbyte_api/models/source_clarif_ai.py - src/airbyte_api/models/source_clazar.py - src/airbyte_api/models/source_clickhouse.py @@ -2946,6 +3460,7 @@ generatedFiles: - src/airbyte_api/models/source_coda.py - src/airbyte_api/models/source_codefresh.py - src/airbyte_api/models/source_coin_api.py + - src/airbyte_api/models/source_coingecko_coins.py - src/airbyte_api/models/source_coinmarketcap.py - src/airbyte_api/models/source_concord.py - src/airbyte_api/models/source_configcat.py @@ -2953,35 +3468,48 @@ generatedFiles: - src/airbyte_api/models/source_convertkit.py - src/airbyte_api/models/source_convex.py - src/airbyte_api/models/source_copper.py + - src/airbyte_api/models/source_couchbase.py - src/airbyte_api/models/source_countercyclical.py - src/airbyte_api/models/source_customer_io.py + - src/airbyte_api/models/source_customerly.py - src/airbyte_api/models/source_datadog.py - src/airbyte_api/models/source_datascope.py - src/airbyte_api/models/source_dbt.py - src/airbyte_api/models/source_delighted.py - src/airbyte_api/models/source_deputy.py + - src/airbyte_api/models/source_ding_connect.py - src/airbyte_api/models/source_dixa.py - src/airbyte_api/models/source_dockerhub.py + - src/airbyte_api/models/source_docuseal.py + - src/airbyte_api/models/source_dolibarr.py - src/airbyte_api/models/source_dremio.py - src/airbyte_api/models/source_drift.py - src/airbyte_api/models/source_drip.py - src/airbyte_api/models/source_dropbox_sign.py + - src/airbyte_api/models/source_dwolla.py - src/airbyte_api/models/source_dynamodb.py - src/airbyte_api/models/source_e_conomic.py - src/airbyte_api/models/source_easypost.py - src/airbyte_api/models/source_easypromos.py + - src/airbyte_api/models/source_ebay_finance.py + - src/airbyte_api/models/source_ebay_fulfillment.py - src/airbyte_api/models/source_elasticemail.py + - src/airbyte_api/models/source_elasticsearch.py - src/airbyte_api/models/source_emailoctopus.py - src/airbyte_api/models/source_employment_hero.py - src/airbyte_api/models/source_encharge.py - src/airbyte_api/models/source_eventbrite.py - src/airbyte_api/models/source_eventee.py - src/airbyte_api/models/source_eventzilla.py + - src/airbyte_api/models/source_everhour.py - src/airbyte_api/models/source_exchange_rates.py - src/airbyte_api/models/source_ezofficeinventory.py - src/airbyte_api/models/source_facebook_marketing.py + - src/airbyte_api/models/source_facebook_pages.py - src/airbyte_api/models/source_factorial.py - src/airbyte_api/models/source_faker.py + - src/airbyte_api/models/source_fastbill.py + - src/airbyte_api/models/source_fastly.py - src/airbyte_api/models/source_fauna.py - src/airbyte_api/models/source_file.py - src/airbyte_api/models/source_fillout.py @@ -3007,10 +3535,12 @@ generatedFiles: - src/airbyte_api/models/source_freshservice.py - src/airbyte_api/models/source_front.py - src/airbyte_api/models/source_fulcrum.py + - src/airbyte_api/models/source_fullstory.py - src/airbyte_api/models/source_gainsight_px.py - src/airbyte_api/models/source_gcs.py - src/airbyte_api/models/source_getgist.py - src/airbyte_api/models/source_getlago.py + - src/airbyte_api/models/source_giphy.py - src/airbyte_api/models/source_gitbook.py - 
src/airbyte_api/models/source_github.py - src/airbyte_api/models/source_gitlab.py @@ -3019,6 +3549,7 @@ generatedFiles: - src/airbyte_api/models/source_gnews.py - src/airbyte_api/models/source_gocardless.py - src/airbyte_api/models/source_goldcast.py + - src/airbyte_api/models/source_gologin.py - src/airbyte_api/models/source_gong.py - src/airbyte_api/models/source_google_ads.py - src/airbyte_api/models/source_google_analytics_data_api.py @@ -3034,35 +3565,48 @@ generatedFiles: - src/airbyte_api/models/source_google_webfonts.py - src/airbyte_api/models/source_gorgias.py - src/airbyte_api/models/source_greenhouse.py + - src/airbyte_api/models/source_greythr.py - src/airbyte_api/models/source_gridly.py - src/airbyte_api/models/source_guru.py - src/airbyte_api/models/source_gutendex.py - src/airbyte_api/models/source_hardcoded_records.py + - src/airbyte_api/models/source_harness.py - src/airbyte_api/models/source_harvest.py - src/airbyte_api/models/source_height.py + - src/airbyte_api/models/source_hellobaton.py + - src/airbyte_api/models/source_help_scout.py - src/airbyte_api/models/source_hibob.py - src/airbyte_api/models/source_high_level.py + - src/airbyte_api/models/source_hoorayhr.py - src/airbyte_api/models/source_hubplanner.py - src/airbyte_api/models/source_hubspot.py + - src/airbyte_api/models/source_hugging_face_datasets.py - src/airbyte_api/models/source_humanitix.py + - src/airbyte_api/models/source_huntr.py - src/airbyte_api/models/source_illumina_basespace.py + - src/airbyte_api/models/source_imagga.py - src/airbyte_api/models/source_incident_io.py - src/airbyte_api/models/source_inflowinventory.py + - src/airbyte_api/models/source_insightful.py - src/airbyte_api/models/source_insightly.py - src/airbyte_api/models/source_instagram.py - src/airbyte_api/models/source_instatus.py - src/airbyte_api/models/source_intercom.py + - src/airbyte_api/models/source_intruder.py - src/airbyte_api/models/source_invoiced.py - src/airbyte_api/models/source_invoiceninja.py - src/airbyte_api/models/source_ip2whois.py - src/airbyte_api/models/source_iterable.py + - src/airbyte_api/models/source_jamf_pro.py - src/airbyte_api/models/source_jira.py - src/airbyte_api/models/source_jobnimbus.py - src/airbyte_api/models/source_jotform.py + - src/airbyte_api/models/source_judge_me_reviews.py - src/airbyte_api/models/source_just_sift.py - src/airbyte_api/models/source_justcall.py - src/airbyte_api/models/source_k6_cloud.py - src/airbyte_api/models/source_katana.py + - src/airbyte_api/models/source_keka.py - src/airbyte_api/models/source_kisi.py - src/airbyte_api/models/source_kissmetrics.py - src/airbyte_api/models/source_klarna.py @@ -3075,6 +3619,7 @@ generatedFiles: - src/airbyte_api/models/source_less_annoying_crm.py - src/airbyte_api/models/source_lever_hiring.py - src/airbyte_api/models/source_lightspeed_retail.py + - src/airbyte_api/models/source_linear.py - src/airbyte_api/models/source_linkedin_ads.py - src/airbyte_api/models/source_linkedin_pages.py - src/airbyte_api/models/source_linnworks.py @@ -3084,6 +3629,7 @@ generatedFiles: - src/airbyte_api/models/source_luma.py - src/airbyte_api/models/source_mailchimp.py - src/airbyte_api/models/source_mailerlite.py + - src/airbyte_api/models/source_mailersend.py - src/airbyte_api/models/source_mailgun.py - src/airbyte_api/models/source_mailjet_mail.py - src/airbyte_api/models/source_mailjet_sms.py @@ -3091,7 +3637,10 @@ generatedFiles: - src/airbyte_api/models/source_mailtrap.py - src/airbyte_api/models/source_marketo.py - 
src/airbyte_api/models/source_marketstack.py + - src/airbyte_api/models/source_mendeley.py - src/airbyte_api/models/source_mention.py + - src/airbyte_api/models/source_mercado_ads.py + - src/airbyte_api/models/source_merge.py - src/airbyte_api/models/source_metabase.py - src/airbyte_api/models/source_microsoft_dataverse.py - src/airbyte_api/models/source_microsoft_entra_id.py @@ -3112,9 +3661,15 @@ generatedFiles: - src/airbyte_api/models/source_mysql.py - src/airbyte_api/models/source_n8n.py - src/airbyte_api/models/source_nasa.py + - src/airbyte_api/models/source_navan.py + - src/airbyte_api/models/source_nebius_ai.py - src/airbyte_api/models/source_netsuite.py + - src/airbyte_api/models/source_netsuite_enterprise.py - src/airbyte_api/models/source_news_api.py + - src/airbyte_api/models/source_newsdata.py - src/airbyte_api/models/source_newsdata_io.py + - src/airbyte_api/models/source_nexiopay.py + - src/airbyte_api/models/source_ninjaone_rmm.py - src/airbyte_api/models/source_nocrm.py - src/airbyte_api/models/source_northpass_lms.py - src/airbyte_api/models/source_notion.py @@ -3128,31 +3683,41 @@ generatedFiles: - src/airbyte_api/models/source_onesignal.py - src/airbyte_api/models/source_onfleet.py - src/airbyte_api/models/source_open_data_dc.py + - src/airbyte_api/models/source_open_exchange_rates.py - src/airbyte_api/models/source_openaq.py - src/airbyte_api/models/source_openfda.py - src/airbyte_api/models/source_openweather.py - src/airbyte_api/models/source_opinion_stage.py - src/airbyte_api/models/source_opsgenie.py + - src/airbyte_api/models/source_opuswatch.py - src/airbyte_api/models/source_oracle.py + - src/airbyte_api/models/source_oracle_enterprise.py - src/airbyte_api/models/source_orb.py - - src/airbyte_api/models/source_orbit.py - src/airbyte_api/models/source_oura.py - src/airbyte_api/models/source_outbrain_amplify.py - src/airbyte_api/models/source_outreach.py - src/airbyte_api/models/source_oveit.py - src/airbyte_api/models/source_pabbly_subscriptions_billing.py + - src/airbyte_api/models/source_paddle.py + - src/airbyte_api/models/source_pagerduty.py - src/airbyte_api/models/source_pandadoc.py - src/airbyte_api/models/source_paperform.py - src/airbyte_api/models/source_papersign.py - src/airbyte_api/models/source_pardot.py + - src/airbyte_api/models/source_partnerize.py + - src/airbyte_api/models/source_partnerstack.py + - src/airbyte_api/models/source_payfit.py - src/airbyte_api/models/source_paypal_transaction.py - src/airbyte_api/models/source_paystack.py - src/airbyte_api/models/source_pendo.py - src/airbyte_api/models/source_pennylane.py + - src/airbyte_api/models/source_perigon.py - src/airbyte_api/models/source_persistiq.py - src/airbyte_api/models/source_persona.py - src/airbyte_api/models/source_pexels_api.py + - src/airbyte_api/models/source_phyllo.py - src/airbyte_api/models/source_picqer.py + - src/airbyte_api/models/source_pingdom.py - src/airbyte_api/models/source_pinterest.py - src/airbyte_api/models/source_pipedrive.py - src/airbyte_api/models/source_pipeliner.py @@ -3164,12 +3729,14 @@ generatedFiles: - src/airbyte_api/models/source_pocket.py - src/airbyte_api/models/source_pokeapi.py - src/airbyte_api/models/source_polygon_stock_api.py + - src/airbyte_api/models/source_poplar.py - src/airbyte_api/models/source_postgres.py - src/airbyte_api/models/source_posthog.py - src/airbyte_api/models/source_postmarkapp.py - src/airbyte_api/models/source_prestashop.py - src/airbyte_api/models/source_pretix.py - src/airbyte_api/models/source_primetric.py + - 
src/airbyte_api/models/source_printify.py - src/airbyte_api/models/source_productboard.py - src/airbyte_api/models/source_productive.py - src/airbyte_api/models/source_pypi.py @@ -3187,10 +3754,13 @@ generatedFiles: - src/airbyte_api/models/source_rentcast.py - src/airbyte_api/models/source_repairshopr.py - src/airbyte_api/models/source_reply_io.py + - src/airbyte_api/models/source_retailexpress_by_maropost.py - src/airbyte_api/models/source_retently.py - src/airbyte_api/models/source_revenuecat.py - src/airbyte_api/models/source_revolut_merchant.py + - src/airbyte_api/models/source_ringcentral.py - src/airbyte_api/models/source_rki_covid.py + - src/airbyte_api/models/source_rocket_chat.py - src/airbyte_api/models/source_rocketlane.py - src/airbyte_api/models/source_rollbar.py - src/airbyte_api/models/source_rootly.py @@ -3203,6 +3773,7 @@ generatedFiles: - src/airbyte_api/models/source_salesforce.py - src/airbyte_api/models/source_salesloft.py - src/airbyte_api/models/source_sap_fieldglass.py + - src/airbyte_api/models/source_sap_hana_enterprise.py - src/airbyte_api/models/source_savvycal.py - src/airbyte_api/models/source_scryfall.py - src/airbyte_api/models/source_secoda.py @@ -3213,14 +3784,21 @@ generatedFiles: - src/airbyte_api/models/source_sendpulse.py - src/airbyte_api/models/source_senseforce.py - src/airbyte_api/models/source_sentry.py + - src/airbyte_api/models/source_serpstat.py + - src/airbyte_api/models/source_service_now.py - src/airbyte_api/models/source_sftp.py - src/airbyte_api/models/source_sftp_bulk.py + - src/airbyte_api/models/source_sharepoint_enterprise.py - src/airbyte_api/models/source_sharetribe.py - src/airbyte_api/models/source_shippo.py + - src/airbyte_api/models/source_shipstation.py - src/airbyte_api/models/source_shopify.py + - src/airbyte_api/models/source_shopwired.py - src/airbyte_api/models/source_shortcut.py - src/airbyte_api/models/source_shortio.py + - src/airbyte_api/models/source_shutterstock.py - src/airbyte_api/models/source_sigma_computing.py + - src/airbyte_api/models/source_signnow.py - src/airbyte_api/models/source_simfin.py - src/airbyte_api/models/source_simplecast.py - src/airbyte_api/models/source_simplesat.py @@ -3237,6 +3815,7 @@ generatedFiles: - src/airbyte_api/models/source_spacex_api.py - src/airbyte_api/models/source_sparkpost.py - src/airbyte_api/models/source_split_io.py + - src/airbyte_api/models/source_spotify_ads.py - src/airbyte_api/models/source_spotlercrm.py - src/airbyte_api/models/source_square.py - src/airbyte_api/models/source_squarespace.py @@ -3248,20 +3827,26 @@ generatedFiles: - src/airbyte_api/models/source_survey_sparrow.py - src/airbyte_api/models/source_surveymonkey.py - src/airbyte_api/models/source_survicate.py + - src/airbyte_api/models/source_svix.py - src/airbyte_api/models/source_systeme.py - src/airbyte_api/models/source_taboola.py + - src/airbyte_api/models/source_tavus.py - src/airbyte_api/models/source_teamtailor.py - src/airbyte_api/models/source_teamwork.py - src/airbyte_api/models/source_tempo.py - src/airbyte_api/models/source_testrail.py - src/airbyte_api/models/source_the_guardian_api.py - src/airbyte_api/models/source_thinkific.py + - src/airbyte_api/models/source_thinkific_courses.py + - src/airbyte_api/models/source_thrive_learning.py - src/airbyte_api/models/source_ticketmaster.py - src/airbyte_api/models/source_tickettailor.py - src/airbyte_api/models/source_tiktok_marketing.py - src/airbyte_api/models/source_timely.py - src/airbyte_api/models/source_tinyemail.py + - 
src/airbyte_api/models/source_tmdb.py - src/airbyte_api/models/source_todoist.py + - src/airbyte_api/models/source_toggl.py - src/airbyte_api/models/source_track_pms.py - src/airbyte_api/models/source_trello.py - src/airbyte_api/models/source_tremendous.py @@ -3271,10 +3856,12 @@ generatedFiles: - src/airbyte_api/models/source_twilio.py - src/airbyte_api/models/source_twilio_taskrouter.py - src/airbyte_api/models/source_twitter.py + - src/airbyte_api/models/source_tyntec_sms.py - src/airbyte_api/models/source_typeform.py - src/airbyte_api/models/source_ubidots.py - src/airbyte_api/models/source_unleash.py - src/airbyte_api/models/source_uppromote.py + - src/airbyte_api/models/source_uptick.py - src/airbyte_api/models/source_us_census.py - src/airbyte_api/models/source_uservoice.py - src/airbyte_api/models/source_vantage.py @@ -3285,6 +3872,7 @@ generatedFiles: - src/airbyte_api/models/source_vwo.py - src/airbyte_api/models/source_waiteraid.py - src/airbyte_api/models/source_wasabi_stats_api.py + - src/airbyte_api/models/source_watchmode.py - src/airbyte_api/models/source_weatherstack.py - src/airbyte_api/models/source_web_scrapper.py - src/airbyte_api/models/source_webflow.py @@ -3294,6 +3882,7 @@ generatedFiles: - src/airbyte_api/models/source_woocommerce.py - src/airbyte_api/models/source_wordpress.py - src/airbyte_api/models/source_workable.py + - src/airbyte_api/models/source_workday.py - src/airbyte_api/models/source_workflowmax.py - src/airbyte_api/models/source_workramp.py - src/airbyte_api/models/source_wrike.py @@ -3304,9 +3893,12 @@ generatedFiles: - src/airbyte_api/models/source_yandex_metrica.py - src/airbyte_api/models/source_yotpo.py - src/airbyte_api/models/source_you_need_a_budget_ynab.py + - src/airbyte_api/models/source_younium.py + - src/airbyte_api/models/source_yousign.py - src/airbyte_api/models/source_youtube_analytics.py - src/airbyte_api/models/source_youtube_data.py - src/airbyte_api/models/source_zapier_supported_storage.py + - src/airbyte_api/models/source_zapsign.py - src/airbyte_api/models/source_zendesk_chat.py - src/airbyte_api/models/source_zendesk_sunshine.py - src/airbyte_api/models/source_zendesk_support.py @@ -3333,68 +3925,183 @@ generatedFiles: - src/airbyte_api/models/sourcesresponse.py - src/airbyte_api/models/streamconfiguration.py - src/airbyte_api/models/streamconfigurations.py + - src/airbyte_api/models/streamconfigurations_input.py - src/airbyte_api/models/streammappertype.py - src/airbyte_api/models/streamproperties.py - src/airbyte_api/models/surveymonkey.py + - src/airbyte_api/models/tag.py + - src/airbyte_api/models/tagcreaterequest.py + - src/airbyte_api/models/tagpatchrequest.py + - src/airbyte_api/models/tagresponse.py + - src/airbyte_api/models/tagsresponse.py - src/airbyte_api/models/tiktok_marketing.py - src/airbyte_api/models/typeform.py + - src/airbyte_api/models/updatedeclarativesourcedefinitionrequest.py + - src/airbyte_api/models/updatedefinitionrequest.py - src/airbyte_api/models/userresponse.py - src/airbyte_api/models/usersresponse.py + - src/airbyte_api/models/webhooknotificationconfig.py - src/airbyte_api/models/workspacecreaterequest.py - src/airbyte_api/models/workspaceoauthcredentialsrequest.py - src/airbyte_api/models/workspaceresponse.py - src/airbyte_api/models/workspacesresponse.py - src/airbyte_api/models/workspaceupdaterequest.py - src/airbyte_api/models/youtube_analytics.py - - src/airbyte_api/models/zendesk_chat.py - src/airbyte_api/models/zendesk_support.py - src/airbyte_api/models/zendesk_talk.py - 
src/airbyte_api/organizations.py - src/airbyte_api/permissions.py - src/airbyte_api/sdk.py - src/airbyte_api/sdkconfiguration.py + - src/airbyte_api/sourcedefinitions.py - src/airbyte_api/sources.py - src/airbyte_api/streams.py + - src/airbyte_api/tags.py - src/airbyte_api/users.py - src/airbyte_api/utils/__init__.py - src/airbyte_api/utils/retries.py - src/airbyte_api/utils/utils.py - src/airbyte_api/workspaces.py examples: - cancelJob: - speakeasy-default-cancel-job: + createConnection: + Connection Creation Request Example: + requestBody: + application/json: {"destinationId": "e478de0d-a3a0-475c-b019-25f7dd29e281", "name": "Postgres-to-Bigquery", "namespaceDefinition": "destination", "namespaceFormat": "", "nonBreakingSchemaUpdatesBehavior": "ignore", "prefix": "", "sourceId": "95e66a59-8045-4307-9678-63bc3c9b8c93"} + responses: + "200": + application/json: {"configurations": {}, "connectionId": "", "createdAt": 781932, "destinationId": "", "name": "", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "cron"}, "sourceId": "", "status": "deprecated", "tags": [], "workspaceId": ""} + Connection Creation Response Example: + requestBody: + application/json: {"destinationId": "328ef851-ea7f-4d47-88a5-873822bcbf2d", "namespaceDefinition": "destination", "namespaceFormat": "", "nonBreakingSchemaUpdatesBehavior": "ignore", "prefix": "", "sourceId": "d0b7adc3-b029-40e4-a7c6-957e4827afdb"} + responses: + "200": + application/json: {"configurations": {}, "connectionId": "9924bcd0-99be-453d-ba47-c2c9766f7da5", "createdAt": 640872, "destinationId": "", "name": "", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "manual"}, "sourceId": "", "status": "active", "tags": [{"color": "cyan", "name": "", "tagId": "e853523a-fb3a-4e6f-ab07-c3dab5d69d79", "workspaceId": "ebc4c796-c29d-4482-8595-4d13693c59a2"}], "workspaceId": ""} + deleteConnection: + speakeasy-default-delete-connection: parameters: path: - jobId: 801771 + connectionId: "" + getConnection: + Connection Get Response Example: + parameters: + path: + connectionId: "" responses: "200": - application/json: {"connectionId": "", "duration": "PT8H6M12S", "jobId": 439621, "jobType": "sync", "startTime": "2023-03-25T01:30:50Z", "status": "running"} - createConnection: - Connection Creation Request Example: + application/json: {"configurations": {}, "connectionId": "", "createdAt": 934653, "destinationId": "744cc0ed-7f05-4949-9e60-2a814f90c035", "name": "Postgres To Snowflake", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "basic"}, "sourceId": "9924bcd0-99be-453d-ba47-c2c9766f7da5", "status": "deprecated", "tags": [{"color": "plum", "name": "", "tagId": "b4546de2-f321-41c6-9948-142095324d2e", "workspaceId": "17e4951a-e407-4789-8206-b16864ce5f12"}, {"color": "sky blue", "name": "", "tagId": "b4405e44-2947-4837-b244-47b66ef5e8ae", "workspaceId": "918d2ca9-a317-4203-8b6e-805981f43fe2"}], "workspaceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826"} + listConnections: + speakeasy-default-list-connections: + parameters: + query: + includeDeleted: false + limit: 20 + offset: 0 + responses: + "200": + application/json: {"data": [{"configurations": {}, "connectionId": "", "createdAt": 726733, "destinationId": "", "name": "test-connection", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "basic"}, "sourceId": "", 
"status": "deprecated", "tags": [{"color": "grey", "name": "", "tagId": "a57cb08e-c762-471d-8a7a-04783f69e675", "workspaceId": "3c652149-922c-4e01-aac2-001d6f740af7"}, {"color": "white", "name": "", "tagId": "67a4cae4-cdf0-465b-a6aa-51c0435bd42a", "workspaceId": "ced986f7-e9d0-4438-b0f0-4622226f5515"}, {"color": "white", "name": "", "tagId": "6e726826-84d9-475b-9843-2b202970fa01", "workspaceId": "2f301cba-1d84-41d1-9873-2c8d3acb53bb"}], "workspaceId": ""}, {"configurations": {}, "connectionId": "", "createdAt": 808076, "destinationId": "", "name": "", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "basic"}, "sourceId": "", "status": "inactive", "tags": [], "workspaceId": ""}, {"configurations": {}, "connectionId": "", "createdAt": 884379, "destinationId": "", "name": "", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "manual"}, "sourceId": "49237019-645d-47d4-b45b-5eddf97775ce", "status": "active", "tags": [{"color": "orange", "name": "", "tagId": "164a228e-17f2-4cbb-832a-554182adc8da", "workspaceId": "6207024d-c903-4e50-8969-f94237c9d7c9"}, {"color": "gold", "name": "", "tagId": "b0cc902b-acff-4def-85ce-25c37e2027ce", "workspaceId": "52bb088a-99ac-49b4-93c4-fdb8fe8d3612"}, {"color": "maroon", "name": "", "tagId": "af798c5c-737f-47ad-95ce-eb405bab6ad1", "workspaceId": "ec04abc4-49b4-4a61-a77f-6c0dd2c2ed68"}], "workspaceId": ""}, {"configurations": {}, "connectionId": "", "createdAt": 584545, "destinationId": "al312fs-0ab1-4f72-9ed7-0b8fc27c5826", "name": "", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "cron"}, "sourceId": "", "status": "inactive", "tags": [{"color": "teal", "name": "", "tagId": "e17d727d-7971-4e63-bc8a-8443f551b94a", "workspaceId": "40a42bea-9fa7-49d3-858c-d28e26b5d262"}, {"color": "tan", "name": "", "tagId": "7f8a0fc7-e3ff-45a9-bc05-4fb599f3003f", "workspaceId": "2a86dca0-6682-477b-b194-3225cfb3db50"}, {"color": "red", "name": "", "tagId": "15955c87-4dcd-4f1e-8d4c-c9a2ad68d233", "workspaceId": "4564af4a-f7b0-407b-8201-5ce5c0aa5c24"}], "workspaceId": ""}, {"configurations": {}, "connectionId": "", "createdAt": 821882, "destinationId": "", "name": "", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "manual"}, "sourceId": "", "status": "active", "tags": [], "workspaceId": ""}, {"configurations": {}, "connectionId": "", "createdAt": 105968, "destinationId": "", "name": "", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "basic"}, "sourceId": "", "status": "active", "tags": [], "workspaceId": ""}], "next": "https://api.airbyte.com/v1/connections?limit=5&offset=10", "previous": "https://api.airbyte.com/v1/connections?limit=5&offset=0"} + patchConnection: + Connection Update Request Example: + parameters: + path: + connectionId: "" requestBody: - application/json: {"destinationId": "e478de0d-a3a0-475c-b019-25f7dd29e281", "name": "Postgres-to-Bigquery", "namespaceFormat": "${SOURCE_NAMESPACE}", "sourceId": "95e66a59-8045-4307-9678-63bc3c9b8c93"} + application/json: {"name": "Postgres-to-Bigquery", "namespaceFormat": ""} responses: "200": - application/json: {"connectionId": "", "createdAt": 781932, "destinationId": "", "name": "", "schedule": {"scheduleType": "basic"}, "sourceId": "", "status": "deprecated", "workspaceId": ""} - Connection Creation 
Response Example: + application/json: {"configurations": {}, "connectionId": "", "createdAt": 106227, "destinationId": "", "name": "", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "basic"}, "sourceId": "", "status": "active", "tags": [], "workspaceId": ""} + Connection Get Response Example: + parameters: + path: + connectionId: "" requestBody: - application/json: {"destinationId": "c669dd1e-3620-483e-afc8-55914e0a570f", "namespaceFormat": "${SOURCE_NAMESPACE}", "sourceId": "6dd427d8-3a55-4584-b835-842325b6c7b3"} + application/json: {"namespaceFormat": ""} responses: "200": - application/json: {"connectionId": "9924bcd0-99be-453d-ba47-c2c9766f7da5", "createdAt": 416535, "destinationId": "", "name": "", "schedule": {"scheduleType": "manual"}, "sourceId": "", "status": "deprecated", "workspaceId": ""} + application/json: {"configurations": {}, "connectionId": "", "createdAt": 287886, "destinationId": "744cc0ed-7f05-4949-9e60-2a814f90c035", "name": "Postgres To Snowflake", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "manual"}, "sourceId": "9924bcd0-99be-453d-ba47-c2c9766f7da5", "status": "active", "tags": [{"color": "sky blue", "name": "", "tagId": "2532230c-083c-40b4-8513-a87f320dcb52", "workspaceId": "73f7fe79-5e52-4d4f-a4a0-e68d60f6f5f8"}, {"color": "red", "name": "", "tagId": "761f7386-b693-45f8-a46c-01122647425f", "workspaceId": "e8035a4c-bbbe-4aca-9e68-2a0b466b2ded"}], "workspaceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826"} createDestination: Destination Creation Request Example: requestBody: - application/json: {"configuration": {"indexing": {"credentials": {"password": "AIRBYTE_PASSWORD"}, "database": "AIRBYTE_DATABASE", "default_schema": "AIRBYTE_SCHEMA", "host": "AIRBYTE_ACCOUNT", "port": "5432", "username": "AIRBYTE_USER"}, "processing": {"chunk_size": 540943, "metadata_fields": ["age"], "text_fields": ["users.*.name"]}}, "name": "Postgres", "workspaceId": "2155ae5a-de39-4808-af6a-16fe7b8b4ed2"} + application/json: {"configuration": {"host": "instructive-mainstream.com", "port": 1521, "schema": "airbyte", "sid": "", "username": "Robert.Legros98"}, "name": "Postgres", "workspaceId": "2155ae5a-de39-4808-af6a-16fe7b8b4ed2"} responses: "200": - application/json: {"configuration": {"indexing": {"auth": {"password": "0SAaZ1kxEg7h49V", "username": "Sonny.Miller"}, "collection": "", "host": "tcp://my-local-milvus:19530"}, "processing": {"chunk_size": 727665, "metadata_fields": ["age"], "text_fields": ["text"]}}, "createdAt": 656314, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "postgres", "name": "Analytics Team Postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} + application/json: {"configuration": {"host": "grizzled-planula.com", "port": 1521, "schema": "airbyte", "sid": "", "username": "Lempi78"}, "createdAt": 971525, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "postgres", "name": "Analytics Team Postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} Destination Creation Response Example: requestBody: - application/json: {"configuration": {"credential": {"hmac_key_access_id": "1234567890abcdefghij1234", "hmac_key_secret": "1234567890abcdefghij1234567890ABCDEFGHIJ"}, "gcs_bucket_name": "airbyte_sync", "gcs_bucket_path": "data_sync/test"}, "name": "", 
"workspaceId": "ad46e8e6-2f62-408e-9ba5-019ef3492fc0"} + application/json: {"configuration": {"credentials": {"client_id": "", "client_secret": "", "refresh_token": "", "type": "OAuth"}}, "name": "", "workspaceId": "b3c34f9e-3902-48cb-a03a-59b4797bfc7d"} responses: "200": - application/json: {"configuration": {"s3_bucket_name": "airbyte_sync", "s3_bucket_path": "data_sync/test"}, "createdAt": 471392, "definitionId": "", "destinationId": "af0c3c67-aa61-419f-8922-95b0bf840e86", "destinationType": "", "name": "", "workspaceId": ""} + application/json: {"configuration": {"cache_type": "hash", "host": "localhost,127.0.0.1", "port": 6379, "ssl": false, "username": "Gunner_Considine73"}, "createdAt": 616066, "definitionId": "", "destinationId": "af0c3c67-aa61-419f-8922-95b0bf840e86", "destinationType": "", "name": "", "workspaceId": ""} + deleteDestination: + speakeasy-default-delete-destination: + parameters: + path: + destinationId: "" + getDestination: + Destination Get Response Example: + parameters: + path: + destinationId: "" + query: {} + responses: + "200": + application/json: {"configuration": {"database": "", "disable_type_dedupe": false, "host": "cooperative-brochure.biz", "port": 3306, "ssl": true, "username": "Colt_Greenfelder"}, "createdAt": 243454, "definitionId": "", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "", "name": "My Destination", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} + listDestinations: + speakeasy-default-list-destinations: + parameters: + query: + includeDeleted: false + limit: 20 + offset: 0 + responses: + "200": + application/json: {"data": [{"configuration": {"client_id": "", "client_secret": "", "is_sandbox": false, "refresh_token": ""}, "createdAt": 607289, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "postgres", "name": "Analytics Team Postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"}], "next": "https://api.airbyte.com/v1/destinations?limit=5&offset=10", "previous": "https://api.airbyte.com/v1/destinations?limit=5&offset=0"} + patchDestination: + Destination Update Request Example: + parameters: + path: + destinationId: "" + requestBody: + application/json: {"configuration": {"test_destination": {"num_messages": 992227, "test_destination_type": "FAILING"}}, "name": "My Destination"} + responses: + "200": + application/json: {"configuration": {"accept_terms": false, "authentication": {"personal_access_token": ""}, "database": "", "hostname": "abc-12345678-wxyz.cloud.databricks.com", "http_path": "sql/1.0/warehouses/0000-1111111-abcd90", "port": "443", "purge_staging_data": true, "raw_schema_override": "airbyte_internal", "schema": "default"}, "createdAt": 708998, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "postgres", "name": "Analytics Team Postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} + Destination Update Response Example: + parameters: + path: + destinationId: "" + requestBody: + application/json: {"configuration": {"api_key": "", "host": "well-made-litter.org"}} + responses: + "200": + application/json: {"configuration": {"access_key_id": "A012345678910EXAMPLE", "dynamodb_endpoint": "", "dynamodb_region": "", "dynamodb_table_name_prefix": "airbyte_sync", "secret_access_key": "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"}, "createdAt": 679016, "definitionId": "", "destinationId": 
"18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "", "name": "running", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} + putDestination: + Destination Update Request Example: + parameters: + path: + destinationId: "" + requestBody: + application/json: {"configuration": {"access_key": "", "deployment_url": "https://cluttered-owl-337.convex.cloud"}, "name": "My Destination"} + responses: + "200": + application/json: {"configuration": {"catalog_type": {"catalog_type": "REST", "namespace": "", "server_uri": "https://second-sustenance.org"}, "main_branch_name": "main", "s3_bucket_name": "", "s3_bucket_region": "us-east-1", "warehouse_location": "s3://your-bucket/path/to/store/files/in"}, "createdAt": 488187, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "postgres", "name": "Analytics Team Postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} + Destination Update Response Example: + parameters: + path: + destinationId: "" + requestBody: + application/json: {"configuration": {"project_id": ""}, "name": ""} + responses: + "200": + application/json: {"configuration": {"project_id": ""}, "createdAt": 946510, "definitionId": "", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "", "name": "running", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} + getHealthCheck: {} + cancelJob: + speakeasy-default-cancel-job: + parameters: + path: + jobId: 801771 + responses: + "200": + application/json: {"connectionId": "", "duration": "PT8H6M12S", "jobId": 278686, "jobType": "sync", "startTime": "2023-03-25T01:30:50Z", "status": "running"} createJob: Job Creation Request Example: requestBody: @@ -3408,325 +4115,428 @@ examples: responses: "200": application/json: {"connectionId": "", "jobId": 1234, "jobType": "sync", "startTime": "", "status": "running"} - createOrUpdateWorkspaceOAuthCredentials: - speakeasy-default-create-or-update-workspace-O-auth-credentials: + getJob: + Job Get Response Example: parameters: path: - workspaceId: "" + jobId: 131101 + responses: + "200": + application/json: {"connectionId": "", "jobId": 471405, "jobType": "sync", "startTime": "", "status": "running"} + listJobs: + "": + parameters: + query: + limit: 20 + offset: 0 + createdAtStart: 1687450500000 + createdAtEnd: 1687450500000 + updatedAtStart: 1687450500000 + updatedAtEnd: 1687450500000 + orderBy: "updatedAt|DESC" + responses: + "200": + application/json: {"data": [{"connectionId": "", "jobId": 808104, "jobType": "sync", "startTime": "2023-03-25T01:30:50Z", "status": "running"}], "next": "https://api.airbyte.com/v1/jobs?limit=5&offset=10", "previous": "https://api.airbyte.com/v1/jobs?limit=5&offset=0"} + Job List Response Example: + parameters: + query: + limit: 20 + offset: 0 + createdAtStart: 1687450500000 + createdAtEnd: 1687450500000 + updatedAtStart: 1687450500000 + updatedAtEnd: 1687450500000 + orderBy: "updatedAt|DESC" + responses: + "200": + application/json: {"data": [{"connectionId": "", "jobId": 4995, "jobType": "sync", "startTime": "", "status": "running"}], "next": "https://api.airbyte.com/v1/jobs?limit=5&offset=10", "previous": "https://api.airbyte.com/v1/jobs?limit=5&offset=0"} + createOrUpdateOrganizationOAuthCredentials: + speakeasy-default-create-or-update-organization-O-auth-credentials: + parameters: + path: + organizationId: "" requestBody: - application/json: {"actorType": "destination", "configuration": {"user": "charles"}, "name": "amazon-ads"} + 
application/json: {"actorType": "source", "configuration": {}, "name": ""} + listOrganizationsForUser: + speakeasy-default-list-organizations-for-user: + responses: + "200": + application/json: {"data": [{"email": "Crystal.Wilkinson@gmail.com", "organizationId": "bb2301b7-877b-4fde-af39-87e9479d84dd", "organizationName": ""}, {"email": "Jennie_Turcotte72@yahoo.com", "organizationId": "064bd610-3f4f-43a8-87c2-8b6239bace54", "organizationName": ""}]} createPermission: Permission Creation Request Example: requestBody: application/json: {"permissionType": "workspace_admin", "userId": "7d08fd6c-531e-4a00-937e-3d355f253e63", "workspaceId": "9924bcd0-99be-453d-ba47-c2c9766f7da5"} responses: "200": - application/json: {"permissionId": "756578e3-7923-4546-a65e-816c2e1a3118", "permissionType": "organization_admin", "userId": "241c67da-1418-479b-a3b1-0769c70bc115"} + application/json: {"permissionId": "29c6a69f-f5c0-4ea9-8676-7e72566e1cea", "permissionType": "organization_member", "userId": "216d117b-3179-47b1-b5ff-4586839a8ca5"} Permission Creation Response Example: requestBody: - application/json: {"permissionType": "organization_editor", "userId": "297cb6ba-4669-43f1-b750-c800e1a1986b"} + application/json: {"permissionType": "workspace_runner", "userId": "6b986e5c-336a-40db-bbce-b07ab4b96d22"} responses: "200": application/json: {"permissionId": "9924bcd0-99be-453d-ba47-c2c9766f7da5", "permissionType": "workspace_admin", "userId": "7d08fd6c-531e-4a00-937e-3d355f253e63"} - createSource: - Source Creation Request Example: - requestBody: - application/json: {"configuration": {"api_key": "", "start_date": "2021-01-01T00:00:00Z"}, "name": "My Source", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} + deletePermission: + speakeasy-default-delete-permission: + parameters: + path: + permissionId: "" + getPermission: + speakeasy-default-get-permission: + parameters: + path: + permissionId: "" responses: "200": - application/json: {"configuration": {"api_key": ""}, "createdAt": 45355, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "name": "Analytics Team Postgres", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} - Source Creation Response Example: + application/json: {"permissionId": "cabecfd2-37c6-4eb2-bcb8-e08391921e2d", "permissionType": "organization_member", "userId": "b50caf9c-9364-4c63-b7d0-206926e8ed64"} + listPermissions: + speakeasy-default-list-permissions: + responses: + "200": + application/json: {"data": [{"permissionId": "3ad9d46a-fbf6-4142-b512-abdbfa31043e", "permissionType": "organization_reader", "scope": "none", "scopeId": "4d0f846f-00ce-47c0-9a66-45253bd14031", "userId": "4957ad3e-d4c9-4d3d-bb33-5b83f3e63c41"}]} + updatePermission: + speakeasy-default-update-permission: + parameters: + path: + permissionId: "" requestBody: - application/json: {"configuration": {"api_key": ""}, "name": "", "workspaceId": "cf0f31f3-ddc9-4848-834b-dfb109056aa6"} + application/json: {"permissionType": "workspace_owner"} responses: "200": - application/json: {"configuration": {"api_key": "", "project_id": "8454.62"}, "createdAt": 782371, "definitionId": "", "name": "", "sourceId": "0c31738c-0b2d-4887-b506-e2cd1c39cc35", "sourceType": "", "workspaceId": ""} - createWorkspace: - Workspace Creation Request Example: + application/json: {"permissionId": "5b7671c5-16bd-4691-a74f-ac5059e6570f", "permissionType": "workspace_editor", "userId": "046f3d8d-80c7-4cd4-acd4-c59bd4fdfc37"} + createSource: + Source Creation 
Request Example: requestBody: - application/json: {"name": "Company Workspace Name"} + application/json: {"configuration": {"api_key": "", "site_id": "docs.airbyte.com"}, "name": "My Source", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} responses: "200": - application/json: {"name": "", "workspaceId": ""} - Workspace Creation Response Example: + application/json: {"configuration": {"api_key": "", "start_date": "2017-01-25T00:00:00Z"}, "createdAt": 218560, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "name": "Analytics Team Postgres", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} + Source Creation Response Example: requestBody: - application/json: {"name": ""} + application/json: {"configuration": {"access_token": "", "wrike_instance": "app-us2.wrike.com"}, "name": "", "workspaceId": "dc883bf1-95a6-46ef-b9bb-403f120decfe"} responses: "200": - application/json: {"name": "", "workspaceId": "9924bcd0-99be-453d-ba47-c2c9766f7da5"} - deleteConnection: - speakeasy-default-delete-connection: - parameters: - path: - connectionId: "" - deleteDestination: - speakeasy-default-delete-destination: - parameters: - path: - destinationId: "" - deletePermission: - speakeasy-default-delete-permission: - parameters: - path: - permissionId: "" + application/json: {"configuration": {"agreement_grant_token": "", "app_secret_token": ""}, "createdAt": 341415, "definitionId": "", "name": "", "sourceId": "0c31738c-0b2d-4887-b506-e2cd1c39cc35", "sourceType": "", "workspaceId": ""} deleteSource: speakeasy-default-delete-source: parameters: path: sourceId: "" - deleteWorkspace: - speakeasy-default-delete-workspace: + getSource: + Source Get Response Example: parameters: path: - workspaceId: "" - getConnection: - Connection Get Response Example: + sourceId: "" + query: {} + responses: + "200": + application/json: {"configuration": {"api_key": ""}, "createdAt": 659848, "definitionId": "", "name": "running", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} + initiateOAuth: + speakeasy-default-initiate-O-auth: + requestBody: + application/json: {"redirectUrl": "https://cloud.airbyte.io/v1/api/oauth/callback", "sourceType": "facebook-pages", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} + listSources: + "": parameters: - path: - connectionId: "" + query: + workspaceIds: ["df08f6b0-b364-4cc1-9b3f-96f5d2fccfb2,b0796797-de23-4fc7-a5e2-7e131314718c"] + includeDeleted: false + limit: 20 + offset: 0 responses: "200": - application/json: {"connectionId": "", "createdAt": 934653, "destinationId": "744cc0ed-7f05-4949-9e60-2a814f90c035", "name": "Postgres To Snowflake", "schedule": {"scheduleType": "basic"}, "sourceId": "9924bcd0-99be-453d-ba47-c2c9766f7da5", "status": "deprecated", "workspaceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826"} - getDestination: - Destination Get Response Example: + application/json: {"data": [{"configuration": {"access_token": ""}, "createdAt": 855060, "definitionId": "", "name": "Analytics Team Postgres", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"}], "next": "https://api.airbyte.com/v1/sources?limit=5&offset=10", "previous": "https://api.airbyte.com/v1/sources?limit=5&offset=0"} + patchSource: + Source Update Request Example: parameters: path: - destinationId: "" + sourceId: "" + requestBody: + application/json: 
{"configuration": {"api_key": ""}, "name": "My Source", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} responses: "200": - application/json: {"configuration": {"host": "cooperative-brochure.biz", "sid": "", "username": "Colt_Greenfelder"}, "createdAt": 243454, "definitionId": "", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "", "name": "My Destination", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} - getHealthCheck: - speakeasy-default-get-health-check: {} - getJob: - Job Get Response Example: + application/json: {"configuration": {"credentials": {"client_id": "", "client_secret": "", "refresh_token": ""}}, "createdAt": 183665, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "name": "Analytics Team Postgres", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} + Source Update Response Example: parameters: path: - jobId: 131101 + sourceId: "" + requestBody: + application/json: {"configuration": {"api_key": "", "sub_domain": ""}, "name": "My source"} responses: "200": - application/json: {"connectionId": "", "jobId": 369528, "jobType": "sync", "startTime": "", "status": "running"} - getPermission: - speakeasy-default-get-permission: + application/json: {"configuration": {"domain_id": "", "secret_key": "", "start_date": "2023-07-30T03:43:59.244Z"}, "createdAt": 291381, "definitionId": "", "name": "running", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} + putSource: + Source Update Request Example: parameters: path: - permissionId: "" + sourceId: "" + requestBody: + application/json: {"configuration": {"api_key": "", "grid_id": ""}, "name": "My Source"} responses: "200": - application/json: {"permissionId": "cea2b5ef-cafa-4d22-a3a7-ccd6aedb824c", "permissionType": "workspace_owner", "userId": "b28fe60f-8634-4971-a922-b1ae92ad8eb7"} - getSource: - Source Get Response Example: + application/json: {"configuration": {"email": "Annabell_Williamson@gmail.com", "password": "1CuU7W6lTkgQ_XF"}, "createdAt": 22579, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "name": "Analytics Team Postgres", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} + Source Update Response Example: parameters: path: sourceId: "" + requestBody: + application/json: {"configuration": {"api_key": "", "user_email": ""}, "name": ""} responses: "200": - application/json: {"configuration": {"credentials": {"access_token": ""}}, "createdAt": 227873, "definitionId": "", "name": "running", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} + application/json: {"configuration": {"api_key": "", "start_date": "2024-12-28T19:32:08.794Z"}, "createdAt": 142182, "definitionId": "", "name": "running", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} getStreamProperties: speakeasy-default-get-stream-properties: parameters: query: sourceId: "" + ignoreCache: false responses: "200": - application/json: [] - getWorkspace: - Workspace Get Response Example: + application/json: [{}] + createTag: + speakeasy-default-create-tag: + requestBody: + application/json: {"color": "blue", "name": "", "workspaceId": "5f85d5ab-c889-4273-91d7-c22bac981db2"} + responses: + "200": + 
application/json: {"color": "FF5733", "name": "Analytics Team", "tagId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} + deleteTag: + speakeasy-default-delete-tag: parameters: path: - workspaceId: "" + tagId: "da1c4fd4-2786-4b27-8b72-2335c85a5af8" + getTag: + speakeasy-default-get-tag: + parameters: + path: + tagId: "808ab48f-5790-47fe-aa1e-3073281a0300" responses: "200": - application/json: {"dataResidency": "auto", "name": "Acme Company", "workspaceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826"} - initiateOAuth: - speakeasy-default-initiate-O-auth: - requestBody: - application/json: {"redirectUrl": "https://cloud.airbyte.io/v1/api/oauth/callback", "sourceType": "gitlab", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} - listConnections: - speakeasy-default-list-connections: + application/json: {"color": "FF5733", "name": "Analytics Team", "tagId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} + listTags: + speakeasy-default-list-tags: responses: "200": - application/json: {"data": [{"connectionId": "", "createdAt": 726733, "destinationId": "", "name": "test-connection", "schedule": {"scheduleType": "basic"}, "sourceId": "", "status": "deprecated", "workspaceId": ""}, {"connectionId": "", "createdAt": 909205, "destinationId": "", "name": "", "schedule": {"scheduleType": "manual"}, "sourceId": "", "status": "inactive", "workspaceId": ""}, {"connectionId": "", "createdAt": 374233, "destinationId": "", "name": "", "schedule": {"scheduleType": "cron"}, "sourceId": "49237019-645d-47d4-b45b-5eddf97775ce", "status": "deprecated", "workspaceId": ""}, {"connectionId": "", "createdAt": 731924, "destinationId": "al312fs-0ab1-4f72-9ed7-0b8fc27c5826", "name": "", "schedule": {"scheduleType": "manual"}, "sourceId": "", "status": "inactive", "workspaceId": ""}, {"connectionId": "", "createdAt": 901846, "destinationId": "", "name": "", "schedule": {"scheduleType": "manual"}, "sourceId": "", "status": "deprecated", "workspaceId": ""}, {"connectionId": "", "createdAt": 475200, "destinationId": "", "name": "", "schedule": {"scheduleType": "cron"}, "sourceId": "", "status": "active", "workspaceId": ""}, {"connectionId": "", "createdAt": 143014, "dataResidency": "auto", "destinationId": "", "name": "", "schedule": {"scheduleType": "cron"}, "sourceId": "", "status": "active", "workspaceId": ""}], "next": "https://api.airbyte.com/v1/connections?limit=5&offset=10", "previous": "https://api.airbyte.com/v1/connections?limit=5&offset=0"} - listDestinations: - speakeasy-default-list-destinations: + application/json: {"data": [{"color": "FF5733", "name": "Analytics Team", "tagId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"}]} + updateTag: + speakeasy-default-update-tag: + parameters: + path: + tagId: "3043493e-7596-4d2b-8ee9-859838c615f6" + requestBody: + application/json: {"color": "turquoise", "name": ""} responses: "200": - application/json: {"data": [{"configuration": {"glue_database": "airbyte_database", "s3_bucket_name": "airbyte_sync", "s3_bucket_path": "data_sync/test"}, "createdAt": 607289, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "postgres", "name": "Analytics Team Postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"}], "next": "https://api.airbyte.com/v1/destinations?limit=5&offset=10", "previous": 
"https://api.airbyte.com/v1/destinations?limit=5&offset=0"} - listJobs: - "": + application/json: {"color": "FF5733", "name": "Analytics Team", "tagId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} + listUsersWithinAnOrganization: + speakeasy-default-list-users-within-an-organization: parameters: query: - createdAtEnd: 1687450500000 - createdAtStart: 1687450500000 - orderBy: "updatedAt|DESC" - updatedAtEnd: 1687450500000 - updatedAtStart: 1687450500000 + organizationId: "" responses: "200": - application/json: {"data": [{"connectionId": "", "jobId": 808104, "jobType": "sync", "startTime": "2023-03-25T01:30:50Z", "status": "running"}], "next": "https://api.airbyte.com/v1/jobs?limit=5&offset=10", "previous": "https://api.airbyte.com/v1/jobs?limit=5&offset=0"} - Job List Response Example: + application/json: {"data": [{"email": "Kira.McGlynn@yahoo.com", "id": "b6f8375a-116b-4e31-b20b-6d2c6e47e56a", "name": ""}]} + createOrUpdateWorkspaceOAuthCredentials: + speakeasy-default-create-or-update-workspace-O-auth-credentials: parameters: - query: - createdAtEnd: 1687450500000 - createdAtStart: 1687450500000 - orderBy: "updatedAt|DESC" - updatedAtEnd: 1687450500000 - updatedAtStart: 1687450500000 + path: + workspaceId: "" + requestBody: + application/json: {"actorType": "destination", "configuration": {}, "name": "microsoft-teams"} + createWorkspace: + Workspace Creation Request Example: + requestBody: + application/json: {"name": "Company Workspace Name"} responses: "200": - application/json: {"data": [{"connectionId": "", "jobId": 4995, "jobType": "sync", "startTime": "", "status": "running"}], "next": "https://api.airbyte.com/v1/jobs?limit=5&offset=10", "previous": "https://api.airbyte.com/v1/jobs?limit=5&offset=0"} - listOrganizationsForUser: - speakeasy-default-list-organizations-for-user: + application/json: {"dataResidency": "", "name": "", "notifications": {}, "workspaceId": ""} + Workspace Creation Response Example: + requestBody: + application/json: {"name": ""} responses: "200": - application/json: {"data": []} - listPermissions: - speakeasy-default-list-permissions: + application/json: {"dataResidency": "", "name": "", "notifications": {}, "workspaceId": "9924bcd0-99be-453d-ba47-c2c9766f7da5"} + deleteWorkspace: + speakeasy-default-delete-workspace: + parameters: + path: + workspaceId: "" + getWorkspace: + Workspace Get Response Example: + parameters: + path: + workspaceId: "" responses: "200": - application/json: {"data": []} - listSources: - "": + application/json: {"dataResidency": "auto", "name": "Acme Company", "notifications": {}, "workspaceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826"} + listWorkspaces: + speakeasy-default-list-workspaces: parameters: query: - workspaceIds: ["df08f6b0-b364-4cc1-9b3f-96f5d2fccfb2,b0796797-de23-4fc7-a5e2-7e131314718c"] + includeDeleted: false + limit: 20 + offset: 0 responses: "200": - application/json: {"data": [{"configuration": {"api_key": "", "start_date": "2024-07-26T03:28:50.329Z"}, "createdAt": 736973, "definitionId": "", "name": "Analytics Team Postgres", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"}], "next": "https://api.airbyte.com/v1/sources?limit=5&offset=10", "previous": "https://api.airbyte.com/v1/sources?limit=5&offset=0"} - listUsersWithinAnOrganization: - speakeasy-default-list-users-within-an-organization: + application/json: {"data": [{"dataResidency": "auto", "name": "Acme Company", 
"notifications": {}, "workspaceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826"}], "next": "https://api.airbyte.com/v1/workspaces?limit=5&offset=10", "previous": "https://api.airbyte.com/v1/workspaces?limit=5&offset=0"} + updateWorkspace: + Workspace Update Request Example: parameters: - query: - organizationId: "" + path: + workspaceId: "" + requestBody: + application/json: {"name": "Company Workspace Name"} responses: "200": - application/json: {"data": []} - listWorkspaces: - speakeasy-default-list-workspaces: + application/json: {"dataResidency": "", "name": "", "notifications": {}, "workspaceId": ""} + Workspace Update Response Example: + parameters: + path: + workspaceId: "" + requestBody: + application/json: {} responses: "200": - application/json: {"data": [{"dataResidency": "auto", "name": "Acme Company", "workspaceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826"}], "next": "https://api.airbyte.com/v1/workspaces?limit=5&offset=10", "previous": "https://api.airbyte.com/v1/workspaces?limit=5&offset=0"} - patchConnection: - Connection Get Response Example: + application/json: {"dataResidency": "", "name": "", "notifications": {}, "workspaceId": "9924bcd0-99be-453d-ba47-c2c9766f7da5"} + createDeclarativeSourceDefinition: + speakeasy-default-create-declarative-source-definition: parameters: path: - connectionId: "" + workspaceId: "2d054f48-a68c-4d16-b04d-bb444d47c285" requestBody: - application/json: {"namespaceFormat": "${SOURCE_NAMESPACE}"} + application/json: {"manifest": "", "name": ""} responses: "200": - application/json: {"connectionId": "", "createdAt": 961870, "destinationId": "744cc0ed-7f05-4949-9e60-2a814f90c035", "name": "Postgres To Snowflake", "schedule": {"scheduleType": "basic"}, "sourceId": "9924bcd0-99be-453d-ba47-c2c9766f7da5", "status": "inactive", "workspaceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826"} - Connection Update Request Example: + application/json: {"id": "", "manifest": "", "name": "", "version": 878216} + deleteDeclarativeSourceDefinition: + speakeasy-default-delete-declarative-source-definition: parameters: path: - connectionId: "" - requestBody: - application/json: {"name": "Postgres-to-Bigquery", "namespaceFormat": "${SOURCE_NAMESPACE}"} + workspaceId: "f7cdc65f-5255-43d5-a6be-8fee673091f3" + definitionId: "26cd06ea-5caa-47b9-98a2-1d217049557d" responses: "200": - application/json: {"connectionId": "", "createdAt": 106227, "destinationId": "", "name": "", "schedule": {"scheduleType": "manual"}, "sourceId": "", "status": "active", "workspaceId": ""} - patchDestination: - Destination Update Request Example: + application/json: {"id": "", "manifest": "", "name": "", "version": 520411} + getDeclarativeSourceDefinition: + speakeasy-default-get-declarative-source-definition: parameters: path: - destinationId: "" - requestBody: - application/json: {"configuration": {"destination_path": "motherduck:"}, "name": "My Destination"} + workspaceId: "3855d0f6-8cfb-44c2-ac49-0c3965c034bd" + definitionId: "a003b7d3-efd4-4d7e-8ea6-469e9fe7871f" responses: "200": - application/json: {"configuration": {"database": "", "host": "disloyal-lox.net", "username": "Tania.Lehner-Yundt67"}, "createdAt": 195047, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "postgres", "name": "Analytics Team Postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} - Destination Update Response Example: + application/json: {"id": "", "manifest": "", "name": "", "version": 250999} + 
listDeclarativeSourceDefinitions: + speakeasy-default-list-declarative-source-definitions: parameters: path: - destinationId: "" - requestBody: - application/json: {"configuration": {"corpus_name": "", "customer_id": "", "oauth2": {"client_id": "", "client_secret": ""}}} + workspaceId: "23bc0a4f-72b3-4d91-abe3-3f32d8a49dfc" responses: "200": - application/json: {"configuration": {"destination_path": "motherduck:"}, "createdAt": 663057, "definitionId": "", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "", "name": "running", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} - patchSource: - Source Update Request Example: + application/json: {"data": []} + updateDeclarativeSourceDefinition: + speakeasy-default-update-declarative-source-definition: parameters: path: - sourceId: "" + workspaceId: "87f1ccdb-71b2-401c-8f60-cac1f2a2da80" + definitionId: "66066427-c293-4cbf-b72e-b31a72a46545" requestBody: - application/json: {"configuration": {"x-api-key": ""}, "name": "My Source", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} + application/json: {"manifest": ""} responses: "200": - application/json: {"configuration": {"secret": ""}, "createdAt": 183665, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "name": "Analytics Team Postgres", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} - Source Update Response Example: + application/json: {"id": "", "manifest": "", "name": "", "version": 92956} + createDestinationDefinition: + speakeasy-default-create-destination-definition: parameters: path: - sourceId: "" + workspaceId: "f49928fc-e1f7-4278-9366-b5b974ad2068" requestBody: - application/json: {"configuration": {"credentials": {"client_id": "", "client_secret": "", "refresh_token": ""}}, "name": "My source"} + application/json: {"dockerImageTag": "", "dockerRepository": "", "name": ""} responses: "200": - application/json: {"configuration": {"api_key": ""}, "createdAt": 776926, "definitionId": "", "name": "running", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} - putDestination: - Destination Update Request Example: + application/json: {"dockerImageTag": "", "dockerRepository": "", "id": "", "name": ""} + deleteDestinationDefinition: + speakeasy-default-delete-destination-definition: parameters: path: - destinationId: "" - requestBody: - application/json: {"configuration": {"database": "", "host": "urban-receptor.org", "username": "Kaylie_Terry"}, "name": "My Destination"} + workspaceId: "619cc567-a21d-4f39-90ab-7854d54c9c42" + definitionId: "7a6d93e0-5a99-4e33-87ce-c0e739faf1e9" responses: "200": - application/json: {"configuration": {"project_id": ""}, "createdAt": 946510, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "postgres", "name": "Analytics Team Postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} - Destination Update Response Example: + application/json: {"dockerImageTag": "", "dockerRepository": "", "id": "", "name": ""} + getDestinationDefinition: + speakeasy-default-get-destination-definition: parameters: path: - destinationId: "" - requestBody: - application/json: {"configuration": {"credential": {"hmac_key_access_id": "1234567890abcdefghij1234", "hmac_key_secret": "1234567890abcdefghij1234567890ABCDEFGHIJ"}, "gcs_bucket_name": "airbyte_sync", "gcs_bucket_path": 
"data_sync/test"}, "name": ""} + workspaceId: "5a9c29a5-f169-496b-b3b1-ab05028ede0b" + definitionId: "5ddd49a6-7aa1-469d-bd19-fa66e3586402" responses: "200": - application/json: {"configuration": {"database": "", "host": "concerned-warming.biz", "username": "Helen.Lubowitz52"}, "createdAt": 490036, "definitionId": "", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "", "name": "running", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} - putSource: - Source Update Request Example: + application/json: {"dockerImageTag": "", "dockerRepository": "", "id": "", "name": ""} + listDestinationDefinitions: + speakeasy-default-list-destination-definitions: parameters: path: - sourceId: "" - requestBody: - application/json: {"configuration": {"api_key": "", "grid_id": ""}, "name": "My Source"} + workspaceId: "f1f18267-b72b-4ea5-a29c-8742c80ceaf4" responses: "200": - application/json: {"configuration": {"api_key": "", "api_secret": "", "shop": "", "start_date": "2021-01-01"}, "createdAt": 27682, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "name": "Analytics Team Postgres", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} - Source Update Response Example: + application/json: {"data": [{"dockerImageTag": "", "dockerRepository": "", "id": "", "name": ""}]} + updateDestinationDefinition: + speakeasy-default-update-destination-definition: parameters: path: - sourceId: "" + workspaceId: "98e0ed50-276f-49ae-ad18-43bc892bb109" + definitionId: "97416649-dabf-43f9-8715-c5c8279f7f23" requestBody: - application/json: {"configuration": {"client_id": "", "developer_token": "", "refresh_token": ""}, "name": ""} + application/json: {"dockerImageTag": "", "name": ""} responses: "200": - application/json: {"configuration": {"custom_reports": [], "start_date": "2022-07-28"}, "createdAt": 476911, "definitionId": "", "name": "running", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} - updatePermission: - speakeasy-default-update-permission: + application/json: {"dockerImageTag": "", "dockerRepository": "", "id": "", "name": ""} + createSourceDefinition: + speakeasy-default-create-source-definition: parameters: path: - permissionId: "" + workspaceId: "06dbde72-63a8-4326-8f4b-67eb708f9ad6" requestBody: - application/json: {"permissionType": "organization_member"} + application/json: {"dockerImageTag": "", "dockerRepository": "", "name": ""} responses: "200": - application/json: {"permissionId": "157b7736-f791-41ce-961d-60b7dd699010", "permissionType": "organization_member", "userId": "24bf7abc-c5a0-4e5a-9be2-69537c07fe9b"} - updateWorkspace: - Workspace Update Request Example: + application/json: {"dockerImageTag": "", "dockerRepository": "", "id": "", "name": ""} + deleteSourceDefinition: + speakeasy-default-delete-source-definition: parameters: path: - workspaceId: "" - requestBody: - application/json: {"name": "Company Workspace Name"} + workspaceId: "9789f575-f200-4155-b7ec-0750094af77f" + definitionId: "fddaf9d9-7e09-433e-8e25-895734ad8809" responses: "200": - application/json: {"name": "", "workspaceId": ""} - Workspace Update Response Example: + application/json: {"dockerImageTag": "", "dockerRepository": "", "id": "", "name": ""} + getSourceDefinition: + speakeasy-default-get-source-definition: parameters: path: - workspaceId: "" + workspaceId: "e76093e5-5cd8-4b87-ab32-c620a178a1c3" + definitionId: 
"b6405f71-0930-4f13-a99b-6b1b0a882853" + responses: + "200": + application/json: {"dockerImageTag": "", "dockerRepository": "", "id": "", "name": ""} + listSourceDefinitions: + speakeasy-default-list-source-definitions: + parameters: + path: + workspaceId: "fb60a310-f38b-47cb-9633-01f0cf740c18" + responses: + "200": + application/json: {"data": [{"dockerImageTag": "", "dockerRepository": "", "id": "", "name": ""}, {"dockerImageTag": "", "dockerRepository": "", "id": "", "name": ""}]} + updateSourceDefinition: + speakeasy-default-update-source-definition: + parameters: + path: + workspaceId: "b6bd5c36-3814-4489-97fb-3e48c1e0fdea" + definitionId: "6eaf6fbb-3e08-4f73-9ff1-de62553abd76" requestBody: - application/json: {"name": ""} + application/json: {"dockerImageTag": "", "name": ""} responses: "200": - application/json: {"name": "", "workspaceId": "9924bcd0-99be-453d-ba47-c2c9766f7da5"} + application/json: {"dockerImageTag": "", "dockerRepository": "", "id": "", "name": ""} +examplesVersion: 1.0.0 generatedTests: {} diff --git a/.speakeasy/workflow.lock b/.speakeasy/workflow.lock index 33f1fb95..48f3f343 100644 --- a/.speakeasy/workflow.lock +++ b/.speakeasy/workflow.lock @@ -1,24 +1,24 @@ -speakeasyVersion: 1.453.10 +speakeasyVersion: 1.508.0 sources: my-source: sourceNamespace: my-source - sourceRevisionDigest: sha256:3712f551c1bc9d55089a54598938b61b271ddcc1a81583646d883bae9f085825 - sourceBlobDigest: sha256:fa5143179ee978611fb032f3948584f4cbf071857ff2ff6611fbac17e1b67eb9 + sourceRevisionDigest: sha256:1d2f15b9c790a784932030450e0ebac32bef1bd690cd86c1d7f7968c1accb931 + sourceBlobDigest: sha256:d0a881322fa4de4a316a25d0c5504263e8a3fc55d31d825e47a6c8de61d9641a tags: - latest - - speakeasy-sdk-regen-1730420397 + - speakeasy-sdk-regen-1759191606 - 1.0.0 targets: python-api: source: my-source sourceNamespace: my-source - sourceRevisionDigest: sha256:3712f551c1bc9d55089a54598938b61b271ddcc1a81583646d883bae9f085825 - sourceBlobDigest: sha256:fa5143179ee978611fb032f3948584f4cbf071857ff2ff6611fbac17e1b67eb9 + sourceRevisionDigest: sha256:1d2f15b9c790a784932030450e0ebac32bef1bd690cd86c1d7f7968c1accb931 + sourceBlobDigest: sha256:d0a881322fa4de4a316a25d0c5504263e8a3fc55d31d825e47a6c8de61d9641a codeSamplesNamespace: my-source-python-code-samples - codeSamplesRevisionDigest: sha256:06fc180ad0f496ce01459cc965e47d9c73051b3339c4ffa34dd99997da5e2bf9 + codeSamplesRevisionDigest: sha256:a9c4fd43ac0bf5205301bb214e4d267ae9955228d7a46614ff0cd5393dc37d8a workflow: workflowVersion: 1.0.0 - speakeasyVersion: latest + speakeasyVersion: 1.508.0 sources: my-source: inputs: diff --git a/README.md b/README.md index 367c5fe5..33ca8122 100755 --- a/README.md +++ b/README.md @@ -66,7 +66,6 @@ res = s.connections.create_connection(request=models.ConnectionCreateRequest( destination_id='e478de0d-a3a0-475c-b019-25f7dd29e281', source_id='95e66a59-8045-4307-9678-63bc3c9b8c93', name='Postgres-to-Bigquery', - namespace_format='${SOURCE_NAMESPACE}', )) if res.connection_response is not None: @@ -91,6 +90,22 @@ if res.connection_response is not None: * [list_connections](docs/sdks/connections/README.md#list_connections) - List connections * [patch_connection](docs/sdks/connections/README.md#patch_connection) - Update Connection details +### [declarative_source_definitions](docs/sdks/declarativesourcedefinitions/README.md) + +* [create_declarative_source_definition](docs/sdks/declarativesourcedefinitions/README.md#create_declarative_source_definition) - Create a declarative source definition. 
+* [delete_declarative_source_definition](docs/sdks/declarativesourcedefinitions/README.md#delete_declarative_source_definition) - Delete a declarative source definition. +* [get_declarative_source_definition](docs/sdks/declarativesourcedefinitions/README.md#get_declarative_source_definition) - Get declarative source definition details. +* [list_declarative_source_definitions](docs/sdks/declarativesourcedefinitions/README.md#list_declarative_source_definitions) - List declarative source definitions. +* [update_declarative_source_definition](docs/sdks/declarativesourcedefinitions/README.md#update_declarative_source_definition) - Update declarative source definition details. + +### [destination_definitions](docs/sdks/destinationdefinitions/README.md) + +* [create_destination_definition](docs/sdks/destinationdefinitions/README.md#create_destination_definition) - Create a destination definition. +* [delete_destination_definition](docs/sdks/destinationdefinitions/README.md#delete_destination_definition) - Delete a destination definition. +* [get_destination_definition](docs/sdks/destinationdefinitions/README.md#get_destination_definition) - Get destination definition details. +* [list_destination_definitions](docs/sdks/destinationdefinitions/README.md#list_destination_definitions) - List destination definitions. +* [update_destination_definition](docs/sdks/destinationdefinitions/README.md#update_destination_definition) - Update destination definition details. + ### [destinations](docs/sdks/destinations/README.md) * [create_destination](docs/sdks/destinations/README.md#create_destination) - Create a destination @@ -113,6 +128,7 @@ if res.connection_response is not None: ### [organizations](docs/sdks/organizations/README.md) +* [create_or_update_organization_o_auth_credentials](docs/sdks/organizations/README.md#create_or_update_organization_o_auth_credentials) - Create OAuth override credentials for an organization and source type. * [list_organizations_for_user](docs/sdks/organizations/README.md#list_organizations_for_user) - List all organizations for a user ### [permissions](docs/sdks/permissions/README.md) @@ -123,6 +139,14 @@ if res.connection_response is not None: * [list_permissions](docs/sdks/permissions/README.md#list_permissions) - List Permissions by user id * [update_permission](docs/sdks/permissions/README.md#update_permission) - Update a permission +### [source_definitions](docs/sdks/sourcedefinitions/README.md) + +* [create_source_definition](docs/sdks/sourcedefinitions/README.md#create_source_definition) - Create a source definition. +* [delete_source_definition](docs/sdks/sourcedefinitions/README.md#delete_source_definition) - Delete a source definition. +* [get_source_definition](docs/sdks/sourcedefinitions/README.md#get_source_definition) - Get source definition details. +* [list_source_definitions](docs/sdks/sourcedefinitions/README.md#list_source_definitions) - List source definitions. +* [update_source_definition](docs/sdks/sourcedefinitions/README.md#update_source_definition) - Update source definition details. 
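The README additions above register several new endpoint groups on the client (declarative source definitions, destination definitions, organization OAuth credentials, source definitions, plus the tags group listed further below). As a rough illustration of how they compose with the existing client, here is a minimal sketch following the `connections` usage pattern already in this README. The method names come from the lists above and the response field names from the new docs/api pages in this diff; `models.TagCreateRequest`, the exact `api.*Request` wrapper locations, and the security setup are assumptions, not confirmed signatures.

```python
import airbyte_api
from airbyte_api import api, models

# Client setup as in the README's existing examples; the Security model and
# field names are assumed here.
s = airbyte_api.AirbyteAPI(
    security=models.Security(
        bearer_auth='<YOUR_BEARER_TOKEN_HERE>',
    ),
)

workspace_id = '871d9b60-11d1-44cb-8c92-c246d53bf87e'

# Tag a workspace. TagCreateRequest is a guessed model name for the createTag
# body ({"name", "color", "workspaceId"} in the generated examples).
tag_res = s.tags.create_tag(request=models.TagCreateRequest(
    name='Analytics Team',
    color='FF5733',
    workspace_id=workspace_id,
))
if tag_res.tag_response is not None:
    print(tag_res.tag_response.tag_id)

# List the source definitions visible to the workspace;
# ListSourceDefinitionsRequest is documented under docs/api/ in this diff.
defs_res = s.source_definitions.list_source_definitions(
    request=api.ListSourceDefinitionsRequest(workspace_id=workspace_id),
)
print(defs_res.status_code)

# Register a declarative (low-code) source definition from a manifest. The
# wrapper and body fields mirror docs/api/createdeclarativesourcedefinitionrequest.md;
# the generated examples pass the manifest as an opaque value.
decl_res = s.declarative_source_definitions.create_declarative_source_definition(
    request=api.CreateDeclarativeSourceDefinitionRequest(
        workspace_id=workspace_id,
        create_declarative_source_definition_request=models.CreateDeclarativeSourceDefinitionRequest(
            name='my-declarative-source',
            manifest='',  # connector-builder manifest; shape not pinned down here
        ),
    ),
)
if decl_res.declarative_source_definition_response is not None:
    print(decl_res.declarative_source_definition_response.version)
```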
+ ### [sources](docs/sdks/sources/README.md) * [create_source](docs/sdks/sources/README.md#create_source) - Create a source @@ -137,6 +161,14 @@ if res.connection_response is not None: * [get_stream_properties](docs/sdks/streams/README.md#get_stream_properties) - Get stream properties +### [tags](docs/sdks/tags/README.md) + +* [create_tag](docs/sdks/tags/README.md#create_tag) - Create a tag +* [delete_tag](docs/sdks/tags/README.md#delete_tag) - Delete a tag +* [get_tag](docs/sdks/tags/README.md#get_tag) - Get a tag +* [list_tags](docs/sdks/tags/README.md#list_tags) - List all tags +* [update_tag](docs/sdks/tags/README.md#update_tag) - Update a tag + ### [users](docs/sdks/users/README.md) * [list_users_within_an_organization](docs/sdks/users/README.md#list_users_within_an_organization) - List all users within an organization @@ -200,7 +232,6 @@ try: destination_id='e478de0d-a3a0-475c-b019-25f7dd29e281', source_id='95e66a59-8045-4307-9678-63bc3c9b8c93', name='Postgres-to-Bigquery', - namespace_format='${SOURCE_NAMESPACE}', )) except errors.SDKError as e: @@ -221,7 +252,7 @@ if res.connection_response is not None: ### Override Server URL Per-Client -The default server can also be overridden globally by passing a URL to the `server_url: str` optional parameter when initializing the SDK client instance. For example: +The default server can be overridden globally by passing a URL to the `server_url: str` optional parameter when initializing the SDK client instance. For example: ```python import airbyte_api from airbyte_api import models @@ -241,7 +272,6 @@ res = s.connections.create_connection(request=models.ConnectionCreateRequest( destination_id='e478de0d-a3a0-475c-b019-25f7dd29e281', source_id='95e66a59-8045-4307-9678-63bc3c9b8c93', name='Postgres-to-Bigquery', - namespace_format='${SOURCE_NAMESPACE}', )) if res.connection_response is not None: @@ -303,7 +333,6 @@ res = s.connections.create_connection(request=models.ConnectionCreateRequest( destination_id='e478de0d-a3a0-475c-b019-25f7dd29e281', source_id='95e66a59-8045-4307-9678-63bc3c9b8c93', name='Postgres-to-Bigquery', - namespace_format='${SOURCE_NAMESPACE}', )) if res.connection_response is not None: diff --git a/RELEASES.md b/RELEASES.md index de3fae20..8ca595c4 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -968,4 +968,14 @@ Based on: ### Generated - [python v0.52.2] . ### Releases -- [PyPI v0.52.2] https://pypi.org/project/airbyte-api/0.52.2 - . \ No newline at end of file +- [PyPI v0.52.2] https://pypi.org/project/airbyte-api/0.52.2 - . + +## 2025-10-02 00:18:23 +### Changes +Based on: +- OpenAPI Doc +- Speakeasy CLI 1.508.0 (2.536.0) https://github.com/speakeasy-api/speakeasy +### Generated +- [python v0.53.0] . +### Releases +- [PyPI v0.53.0] https://pypi.org/project/airbyte-api/0.53.0 - . 
\ No newline at end of file diff --git a/USAGE.md b/USAGE.md index 130ca490..f6935d47 100644 --- a/USAGE.md +++ b/USAGE.md @@ -17,7 +17,6 @@ res = s.connections.create_connection(request=models.ConnectionCreateRequest( destination_id='e478de0d-a3a0-475c-b019-25f7dd29e281', source_id='95e66a59-8045-4307-9678-63bc3c9b8c93', name='Postgres-to-Bigquery', - namespace_format='${SOURCE_NAMESPACE}', )) if res.connection_response is not None: diff --git a/docs/api/createdeclarativesourcedefinitionrequest.md b/docs/api/createdeclarativesourcedefinitionrequest.md new file mode 100644 index 00000000..24a4594c --- /dev/null +++ b/docs/api/createdeclarativesourcedefinitionrequest.md @@ -0,0 +1,9 @@ +# CreateDeclarativeSourceDefinitionRequest + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | +| `create_declarative_source_definition_request` | [models.CreateDeclarativeSourceDefinitionRequest](../models/createdeclarativesourcedefinitionrequest.md) | :heavy_check_mark: | N/A | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/api/createdeclarativesourcedefinitionresponse.md b/docs/api/createdeclarativesourcedefinitionresponse.md new file mode 100644 index 00000000..7db2f277 --- /dev/null +++ b/docs/api/createdeclarativesourcedefinitionresponse.md @@ -0,0 +1,11 @@ +# CreateDeclarativeSourceDefinitionResponse + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | +| `declarative_source_definition_response` | [Optional[models.DeclarativeSourceDefinitionResponse]](../models/declarativesourcedefinitionresponse.md) | :heavy_minus_sign: | Success | \ No newline at end of file diff --git a/docs/api/createdestinationdefinitionrequest.md b/docs/api/createdestinationdefinitionrequest.md new file mode 100644 index 00000000..5a40f143 --- /dev/null +++ b/docs/api/createdestinationdefinitionrequest.md @@ -0,0 +1,9 @@ +# CreateDestinationDefinitionRequest + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | +| `create_definition_request` | 
[models.CreateDefinitionRequest](../models/createdefinitionrequest.md) | :heavy_check_mark: | N/A | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/api/createdestinationdefinitionresponse.md b/docs/api/createdestinationdefinitionresponse.md new file mode 100644 index 00000000..496f698f --- /dev/null +++ b/docs/api/createdestinationdefinitionresponse.md @@ -0,0 +1,11 @@ +# CreateDestinationDefinitionResponse + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | +| `definition_response` | [Optional[models.DefinitionResponse]](../models/definitionresponse.md) | :heavy_minus_sign: | Success | \ No newline at end of file diff --git a/docs/api/createorupdateorganizationoauthcredentialsrequest.md b/docs/api/createorupdateorganizationoauthcredentialsrequest.md new file mode 100644 index 00000000..8800a8dd --- /dev/null +++ b/docs/api/createorupdateorganizationoauthcredentialsrequest.md @@ -0,0 +1,9 @@ +# CreateOrUpdateOrganizationOAuthCredentialsRequest + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | +| `organization_o_auth_credentials_request` | [models.OrganizationOAuthCredentialsRequest](../models/organizationoauthcredentialsrequest.md) | :heavy_check_mark: | N/A | +| `organization_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/api/createorupdateorganizationoauthcredentialsresponse.md b/docs/api/createorupdateorganizationoauthcredentialsresponse.md new file mode 100644 index 00000000..989a0817 --- /dev/null +++ b/docs/api/createorupdateorganizationoauthcredentialsresponse.md @@ -0,0 +1,10 @@ +# CreateOrUpdateOrganizationOAuthCredentialsResponse + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP 
response; suitable for custom response parsing | \ No newline at end of file diff --git a/docs/api/createsourcedefinitionrequest.md b/docs/api/createsourcedefinitionrequest.md new file mode 100644 index 00000000..5f07f80c --- /dev/null +++ b/docs/api/createsourcedefinitionrequest.md @@ -0,0 +1,9 @@ +# CreateSourceDefinitionRequest + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | +| `create_definition_request` | [models.CreateDefinitionRequest](../models/createdefinitionrequest.md) | :heavy_check_mark: | N/A | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/api/createsourcedefinitionresponse.md b/docs/api/createsourcedefinitionresponse.md new file mode 100644 index 00000000..64ad38bf --- /dev/null +++ b/docs/api/createsourcedefinitionresponse.md @@ -0,0 +1,11 @@ +# CreateSourceDefinitionResponse + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | +| `definition_response` | [Optional[models.DefinitionResponse]](../models/definitionresponse.md) | :heavy_minus_sign: | Success | \ No newline at end of file diff --git a/docs/api/createtagresponse.md b/docs/api/createtagresponse.md new file mode 100644 index 00000000..1eebeec6 --- /dev/null +++ b/docs/api/createtagresponse.md @@ -0,0 +1,11 @@ +# CreateTagResponse + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | | 
+| `tag_response` | [Optional[models.TagResponse]](../models/tagresponse.md) | :heavy_minus_sign: | Successful operation | {"tagId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "name": "Analytics Team", "color": "FF5733", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"
} | \ No newline at end of file diff --git a/docs/api/deletedeclarativesourcedefinitionrequest.md b/docs/api/deletedeclarativesourcedefinitionrequest.md new file mode 100644 index 00000000..4fc89b6a --- /dev/null +++ b/docs/api/deletedeclarativesourcedefinitionrequest.md @@ -0,0 +1,9 @@ +# DeleteDeclarativeSourceDefinitionRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `definition_id` | *str* | :heavy_check_mark: | N/A | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/api/deletedeclarativesourcedefinitionresponse.md b/docs/api/deletedeclarativesourcedefinitionresponse.md new file mode 100644 index 00000000..eb8214b3 --- /dev/null +++ b/docs/api/deletedeclarativesourcedefinitionresponse.md @@ -0,0 +1,11 @@ +# DeleteDeclarativeSourceDefinitionResponse + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | +| `declarative_source_definition_response` | [Optional[models.DeclarativeSourceDefinitionResponse]](../models/declarativesourcedefinitionresponse.md) | :heavy_minus_sign: | Success | \ No newline at end of file diff --git a/docs/api/deletedestinationdefinitionrequest.md b/docs/api/deletedestinationdefinitionrequest.md new file mode 100644 index 00000000..af10e952 --- /dev/null +++ b/docs/api/deletedestinationdefinitionrequest.md @@ -0,0 +1,9 @@ +# DeleteDestinationDefinitionRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `definition_id` | *str* | :heavy_check_mark: | N/A | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/api/deletedestinationdefinitionresponse.md b/docs/api/deletedestinationdefinitionresponse.md new file mode 100644 index 00000000..10387a01 --- /dev/null +++ b/docs/api/deletedestinationdefinitionresponse.md @@ -0,0 +1,11 @@ +# DeleteDestinationDefinitionResponse + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | 
[requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | +| `definition_response` | [Optional[models.DefinitionResponse]](../models/definitionresponse.md) | :heavy_minus_sign: | Success | \ No newline at end of file diff --git a/docs/api/deletesourcedefinitionrequest.md b/docs/api/deletesourcedefinitionrequest.md new file mode 100644 index 00000000..5e87c9d2 --- /dev/null +++ b/docs/api/deletesourcedefinitionrequest.md @@ -0,0 +1,9 @@ +# DeleteSourceDefinitionRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `definition_id` | *str* | :heavy_check_mark: | N/A | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/api/deletesourcedefinitionresponse.md b/docs/api/deletesourcedefinitionresponse.md new file mode 100644 index 00000000..0c7c0428 --- /dev/null +++ b/docs/api/deletesourcedefinitionresponse.md @@ -0,0 +1,11 @@ +# DeleteSourceDefinitionResponse + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | +| `definition_response` | [Optional[models.DefinitionResponse]](../models/definitionresponse.md) | :heavy_minus_sign: | Success | \ No newline at end of file diff --git a/docs/api/deletetagrequest.md b/docs/api/deletetagrequest.md new file mode 100644 index 00000000..42540458 --- /dev/null +++ b/docs/api/deletetagrequest.md @@ -0,0 +1,8 @@ +# DeleteTagRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `tag_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/api/deletetagresponse.md b/docs/api/deletetagresponse.md new file mode 100644 index 00000000..64dff772 --- /dev/null +++ b/docs/api/deletetagresponse.md @@ -0,0 +1,10 @@ +# DeleteTagResponse + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | \ No newline at end of file diff --git 
a/docs/api/getdeclarativesourcedefinitionrequest.md b/docs/api/getdeclarativesourcedefinitionrequest.md new file mode 100644 index 00000000..0904f327 --- /dev/null +++ b/docs/api/getdeclarativesourcedefinitionrequest.md @@ -0,0 +1,9 @@ +# GetDeclarativeSourceDefinitionRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `definition_id` | *str* | :heavy_check_mark: | N/A | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/api/getdeclarativesourcedefinitionresponse.md b/docs/api/getdeclarativesourcedefinitionresponse.md new file mode 100644 index 00000000..7348c498 --- /dev/null +++ b/docs/api/getdeclarativesourcedefinitionresponse.md @@ -0,0 +1,11 @@ +# GetDeclarativeSourceDefinitionResponse + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | +| `declarative_source_definition_response` | [Optional[models.DeclarativeSourceDefinitionResponse]](../models/declarativesourcedefinitionresponse.md) | :heavy_minus_sign: | Success | \ No newline at end of file diff --git a/docs/api/getdestinationdefinitionrequest.md b/docs/api/getdestinationdefinitionrequest.md new file mode 100644 index 00000000..9a1125ba --- /dev/null +++ b/docs/api/getdestinationdefinitionrequest.md @@ -0,0 +1,9 @@ +# GetDestinationDefinitionRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `definition_id` | *str* | :heavy_check_mark: | N/A | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/api/getdestinationdefinitionresponse.md b/docs/api/getdestinationdefinitionresponse.md new file mode 100644 index 00000000..12ac0745 --- /dev/null +++ b/docs/api/getdestinationdefinitionresponse.md @@ -0,0 +1,11 @@ +# GetDestinationDefinitionResponse + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | +| 
`definition_response` | [Optional[models.DefinitionResponse]](../models/definitionresponse.md) | :heavy_minus_sign: | Success |
\ No newline at end of file
diff --git a/docs/api/getdestinationrequest.md b/docs/api/getdestinationrequest.md
index da3a9d27..7b7c1333 100644
--- a/docs/api/getdestinationrequest.md
+++ b/docs/api/getdestinationrequest.md
@@ -3,6 +3,7 @@
 
 ## Fields
 
-| Field | Type | Required | Description |
-| ------------------ | ------------------ | ------------------ | ------------------ |
-| `destination_id` | *str* | :heavy_check_mark: | N/A |
\ No newline at end of file
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `destination_id` | *str* | :heavy_check_mark: | N/A |
+| `include_secret_coordinates` | *Optional[bool]* | :heavy_minus_sign: | Rather than return `***` for secret properties, include the secret coordinate information |
\ No newline at end of file
diff --git a/docs/api/getsourcedefinitionrequest.md b/docs/api/getsourcedefinitionrequest.md
new file mode 100644
index 00000000..b496dead
--- /dev/null
+++ b/docs/api/getsourcedefinitionrequest.md
@@ -0,0 +1,9 @@
+# GetSourceDefinitionRequest
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| ------------------ | ------------------ | ------------------ | ------------------ |
+| `definition_id` | *str* | :heavy_check_mark: | N/A |
+| `workspace_id` | *str* | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/api/getsourcedefinitionresponse.md b/docs/api/getsourcedefinitionresponse.md
new file mode 100644
index 00000000..98f04c49
--- /dev/null
+++ b/docs/api/getsourcedefinitionresponse.md
@@ -0,0 +1,11 @@
+# GetSourceDefinitionResponse
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation |
+| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation |
+| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing |
+| `definition_response` | [Optional[models.DefinitionResponse]](../models/definitionresponse.md) | :heavy_minus_sign: | Success |
\ No newline at end of file
diff --git a/docs/api/getsourcerequest.md b/docs/api/getsourcerequest.md
index bc52e166..7d7e9f0d 100644
--- a/docs/api/getsourcerequest.md
+++ b/docs/api/getsourcerequest.md
@@ -3,6 +3,7 @@
 
 ## Fields
 
-| Field | Type | Required | Description |
-| ------------------ | ------------------ | ------------------ | ------------------ |
-| `source_id` | *str* | :heavy_check_mark: | N/A |
\ No newline at end of file
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `source_id` | *str* | :heavy_check_mark: | N/A |
+| `include_secret_coordinates` | *Optional[bool]* | :heavy_minus_sign: | Rather than return `***` for secret properties, include the secret coordinate information |
\ No newline at end of file
diff --git a/docs/api/gettagrequest.md b/docs/api/gettagrequest.md
new file mode 100644
index 00000000..89dc9cc9
--- /dev/null
+++ b/docs/api/gettagrequest.md
@@ -0,0 +1,8 @@
+# GetTagRequest
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| ------------------ | ------------------ | ------------------ | ------------------ |
+| `tag_id` | *str* | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/api/gettagresponse.md b/docs/api/gettagresponse.md
new file mode 100644
index 00000000..2384dd67
--- /dev/null
+++ b/docs/api/gettagresponse.md
@@ -0,0 +1,11 @@
+# GetTagResponse
+
+
+## Fields
+
+| Field | Type | Required | Description | Example |
+| --- | --- | --- | --- | --- |
+| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | |
+| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | |
+| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | |
+| `tag_response` | [Optional[models.TagResponse]](../models/tagresponse.md) | :heavy_minus_sign: | Successful operation | {
"tagId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826",
"name": "Analytics Team",
"color": "FF5733",
"workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"
} |
\ No newline at end of file
diff --git a/docs/api/listconnectionsrequest.md b/docs/api/listconnectionsrequest.md
index 6b449128..6020e9ea 100644
--- a/docs/api/listconnectionsrequest.md
+++ b/docs/api/listconnectionsrequest.md
@@ -8,4 +8,5 @@
 | `include_deleted` | *Optional[bool]* | :heavy_minus_sign: | Include deleted connections in the returned results. |
 | `limit` | *Optional[int]* | :heavy_minus_sign: | Set the limit on the number of Connections returned. The default is 20. |
 | `offset` | *Optional[int]* | :heavy_minus_sign: | Set the offset to start at when returning Connections. The default is 0 |
+| `tag_ids` | List[*str*] | :heavy_minus_sign: | The UUIDs of the tags you wish to list connections for. An empty list will retrieve all connections. |
 | `workspace_ids` | List[*str*] | :heavy_minus_sign: | The UUIDs of the workspaces you wish to list connections for. Empty list will retrieve all allowed workspaces. |
\ No newline at end of file
diff --git a/docs/api/listconnectionsresponse.md b/docs/api/listconnectionsresponse.md
index bde7ba9a..2df50875 100644
--- a/docs/api/listconnectionsresponse.md
+++ b/docs/api/listconnectionsresponse.md
@@ -3,9 +3,9 @@
 
 ## Fields
 
-| Field | Type | Required | Description | Example |
-| --- | --- | --- | --- |
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | | -| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | | -| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | | -| `connections_response` | [Optional[models.ConnectionsResponse]](../models/connectionsresponse.md) | :heavy_minus_sign: | Successful operation | {
"next": "https://api.airbyte.com/v1/connections?limit=5\u0026offset=10",
"previous": "https://api.airbyte.com/v1/connections?limit=5\u0026offset=0",
"data": [
{
"name": "test-connection"
},
{
"connection_id": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826"
},
{
"sourceId": "49237019-645d-47d4-b45b-5eddf97775ce"
},
{
"destinationId": "al312fs-0ab1-4f72-9ed7-0b8fc27c5826"
},
{
"schedule": {
"scheduleType": "manual"
}
},
{
"status": "active"
},
{
"dataResidency": "auto"
}
]
} | \ No newline at end of file +| Field | Type | Required | Description | Example | +| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | | +| `connections_response` | [Optional[models.ConnectionsResponse]](../models/connectionsresponse.md) | :heavy_minus_sign: | Successful operation | {
"next": "https://api.airbyte.com/v1/connections?limit=5\u0026offset=10",
"previous": "https://api.airbyte.com/v1/connections?limit=5\u0026offset=0",
"data": [
{
"name": "test-connection"
},
{
"connection_id": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826"
},
{
"sourceId": "49237019-645d-47d4-b45b-5eddf97775ce"
},
{
"destinationId": "al312fs-0ab1-4f72-9ed7-0b8fc27c5826"
},
{
"schedule": {
"scheduleType": "manual"
}
},
{
"status": "active"
}
]
} | \ No newline at end of file diff --git a/docs/api/listdeclarativesourcedefinitionsrequest.md b/docs/api/listdeclarativesourcedefinitionsrequest.md new file mode 100644 index 00000000..3d2f0119 --- /dev/null +++ b/docs/api/listdeclarativesourcedefinitionsrequest.md @@ -0,0 +1,8 @@ +# ListDeclarativeSourceDefinitionsRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/api/listdeclarativesourcedefinitionsresponse.md b/docs/api/listdeclarativesourcedefinitionsresponse.md new file mode 100644 index 00000000..2ce7760e --- /dev/null +++ b/docs/api/listdeclarativesourcedefinitionsresponse.md @@ -0,0 +1,11 @@ +# ListDeclarativeSourceDefinitionsResponse + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | +| `declarative_source_definitions_response` | [Optional[models.DeclarativeSourceDefinitionsResponse]](../models/declarativesourcedefinitionsresponse.md) | :heavy_minus_sign: | Successful operation | \ No newline at end of file diff --git a/docs/api/listdestinationdefinitionsrequest.md b/docs/api/listdestinationdefinitionsrequest.md new file mode 100644 index 00000000..c7f61189 --- /dev/null +++ b/docs/api/listdestinationdefinitionsrequest.md @@ -0,0 +1,8 @@ +# ListDestinationDefinitionsRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/api/listdestinationdefinitionsresponse.md b/docs/api/listdestinationdefinitionsresponse.md new file mode 100644 index 00000000..931f25d4 --- /dev/null +++ b/docs/api/listdestinationdefinitionsresponse.md @@ -0,0 +1,11 @@ +# ListDestinationDefinitionsResponse + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | +| 
`definitions_response` | [Optional[models.DefinitionsResponse]](../models/definitionsresponse.md) | :heavy_minus_sign: | Successful operation | \ No newline at end of file diff --git a/docs/api/listsourcedefinitionsrequest.md b/docs/api/listsourcedefinitionsrequest.md new file mode 100644 index 00000000..8703336d --- /dev/null +++ b/docs/api/listsourcedefinitionsrequest.md @@ -0,0 +1,8 @@ +# ListSourceDefinitionsRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/api/listsourcedefinitionsresponse.md b/docs/api/listsourcedefinitionsresponse.md new file mode 100644 index 00000000..a9e9b8d9 --- /dev/null +++ b/docs/api/listsourcedefinitionsresponse.md @@ -0,0 +1,11 @@ +# ListSourceDefinitionsResponse + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | +| `definitions_response` | [Optional[models.DefinitionsResponse]](../models/definitionsresponse.md) | :heavy_minus_sign: | Successful operation | \ No newline at end of file diff --git a/docs/api/listtagsrequest.md b/docs/api/listtagsrequest.md new file mode 100644 index 00000000..dae73d89 --- /dev/null +++ b/docs/api/listtagsrequest.md @@ -0,0 +1,8 @@ +# ListTagsRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `workspace_ids` | List[*str*] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/api/listtagsresponse.md b/docs/api/listtagsresponse.md new file mode 100644 index 00000000..1d8e6f06 --- /dev/null +++ b/docs/api/listtagsresponse.md @@ -0,0 +1,11 @@ +# ListTagsResponse + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | +| `tags_response` | [Optional[models.TagsResponse]](../models/tagsresponse.md) | :heavy_minus_sign: | List Tags. 
| \ No newline at end of file diff --git a/docs/api/updatedeclarativesourcedefinitionrequest.md b/docs/api/updatedeclarativesourcedefinitionrequest.md new file mode 100644 index 00000000..2a027343 --- /dev/null +++ b/docs/api/updatedeclarativesourcedefinitionrequest.md @@ -0,0 +1,10 @@ +# UpdateDeclarativeSourceDefinitionRequest + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | +| `update_declarative_source_definition_request` | [models.UpdateDeclarativeSourceDefinitionRequest](../models/updatedeclarativesourcedefinitionrequest.md) | :heavy_check_mark: | N/A | +| `definition_id` | *str* | :heavy_check_mark: | N/A | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/api/updatedeclarativesourcedefinitionresponse.md b/docs/api/updatedeclarativesourcedefinitionresponse.md new file mode 100644 index 00000000..76200840 --- /dev/null +++ b/docs/api/updatedeclarativesourcedefinitionresponse.md @@ -0,0 +1,11 @@ +# UpdateDeclarativeSourceDefinitionResponse + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | +| `declarative_source_definition_response` | [Optional[models.DeclarativeSourceDefinitionResponse]](../models/declarativesourcedefinitionresponse.md) | :heavy_minus_sign: | Success | \ No newline at end of file diff --git a/docs/api/updatedestinationdefinitionrequest.md b/docs/api/updatedestinationdefinitionrequest.md new file mode 100644 index 00000000..2f813bcf --- /dev/null +++ b/docs/api/updatedestinationdefinitionrequest.md @@ -0,0 +1,10 @@ +# UpdateDestinationDefinitionRequest + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | +| `update_definition_request` | [models.UpdateDefinitionRequest](../models/updatedefinitionrequest.md) | :heavy_check_mark: | N/A | +| `definition_id` | *str* | :heavy_check_mark: | N/A | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/api/updatedestinationdefinitionresponse.md b/docs/api/updatedestinationdefinitionresponse.md new 
file mode 100644 index 00000000..a61ed6b9 --- /dev/null +++ b/docs/api/updatedestinationdefinitionresponse.md @@ -0,0 +1,11 @@ +# UpdateDestinationDefinitionResponse + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | +| `definition_response` | [Optional[models.DefinitionResponse]](../models/definitionresponse.md) | :heavy_minus_sign: | Success | \ No newline at end of file diff --git a/docs/api/updatesourcedefinitionrequest.md b/docs/api/updatesourcedefinitionrequest.md new file mode 100644 index 00000000..31454c14 --- /dev/null +++ b/docs/api/updatesourcedefinitionrequest.md @@ -0,0 +1,10 @@ +# UpdateSourceDefinitionRequest + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | +| `update_definition_request` | [models.UpdateDefinitionRequest](../models/updatedefinitionrequest.md) | :heavy_check_mark: | N/A | +| `definition_id` | *str* | :heavy_check_mark: | N/A | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/api/updatesourcedefinitionresponse.md b/docs/api/updatesourcedefinitionresponse.md new file mode 100644 index 00000000..b21cdd60 --- /dev/null +++ b/docs/api/updatesourcedefinitionresponse.md @@ -0,0 +1,11 @@ +# UpdateSourceDefinitionResponse + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | +| `definition_response` | [Optional[models.DefinitionResponse]](../models/definitionresponse.md) | :heavy_minus_sign: | Success | \ No newline at end of file diff --git a/docs/api/updatetagrequest.md b/docs/api/updatetagrequest.md new file mode 100644 index 00000000..68f404c1 --- /dev/null +++ b/docs/api/updatetagrequest.md @@ -0,0 +1,9 @@ +# UpdateTagRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------ | 
------------------------------------------------------ | ------------------------------------------------------ | ------------------------------------------------------ | +| `tag_patch_request` | [models.TagPatchRequest](../models/tagpatchrequest.md) | :heavy_check_mark: | N/A | +| `tag_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/api/updatetagresponse.md b/docs/api/updatetagresponse.md new file mode 100644 index 00000000..e43ae383 --- /dev/null +++ b/docs/api/updatetagresponse.md @@ -0,0 +1,11 @@ +# UpdateTagResponse + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `content_type` | *str* | :heavy_check_mark: | HTTP response content type for this operation | | +| `status_code` | *int* | :heavy_check_mark: | HTTP response status code for this operation | | +| `raw_response` | [requests.Response](https://requests.readthedocs.io/en/latest/api/#requests.Response) | :heavy_check_mark: | Raw HTTP response; suitable for custom response parsing | | +| `tag_response` | [Optional[models.TagResponse]](../models/tagresponse.md) | :heavy_minus_sign: | Successful operation | {
"tagId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826",
"name": "Analytics Team",
"color": "FF5733",
"workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"
} | \ No newline at end of file diff --git a/docs/models/allow.md b/docs/models/allow.md index ac57f0ae..fbf331f9 100644 --- a/docs/models/allow.md +++ b/docs/models/allow.md @@ -5,6 +5,6 @@ Allow SSL mode. ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -| `mode` | [Optional[models.DestinationPostgresMode]](../models/destinationpostgresmode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | +| `mode` | [Optional[models.DestinationPostgresSchemasMode]](../models/destinationpostgresschemasmode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcesnowflakeschemascredentialsauthtype.md b/docs/models/alpacabrokerapi.md similarity index 54% rename from docs/models/sourcesnowflakeschemascredentialsauthtype.md rename to docs/models/alpacabrokerapi.md index 466bf25b..2dfee831 100644 --- a/docs/models/sourcesnowflakeschemascredentialsauthtype.md +++ b/docs/models/alpacabrokerapi.md @@ -1,8 +1,8 @@ -# SourceSnowflakeSchemasCredentialsAuthType +# AlpacaBrokerAPI ## Values | Name | Value | | ------------------- | ------------------- | -| `USERNAME_PASSWORD` | username/password | \ No newline at end of file +| `ALPACA_BROKER_API` | alpaca-broker-api | \ No newline at end of file diff --git a/docs/models/alphavantage.md b/docs/models/alphavantage.md new file mode 100644 index 00000000..0ef807e1 --- /dev/null +++ b/docs/models/alphavantage.md @@ -0,0 +1,8 @@ +# AlphaVantage + + +## Values + +| Name | Value | +| --------------- | --------------- | +| `ALPHA_VANTAGE` | alpha-vantage | \ No newline at end of file diff --git a/docs/models/amazonsellerpartner.md b/docs/models/amazonsellerpartner.md index b360a463..e7bb172d 100644 --- a/docs/models/amazonsellerpartner.md +++ b/docs/models/amazonsellerpartner.md @@ -5,5 +5,6 @@ | Field | Type | Required | Description | | ------------------------------------- | ------------------------------------- | ------------------------------------- | ------------------------------------- | +| `app_id` | *Optional[str]* | :heavy_minus_sign: | Your Amazon Application ID. | | `lwa_app_id` | *Optional[str]* | :heavy_minus_sign: | Your Login with Amazon Client ID. | | `lwa_client_secret` | *Optional[str]* | :heavy_minus_sign: | Your Login with Amazon Client Secret. 
| \ No newline at end of file diff --git a/docs/models/apihost.md b/docs/models/apihost.md new file mode 100644 index 00000000..dd73952a --- /dev/null +++ b/docs/models/apihost.md @@ -0,0 +1,11 @@ +# APIHost + +https://apiz.sandbox.ebay.com for sandbox & https://apiz.ebay.com for production + + +## Values + +| Name | Value | +| ----------------------------- | ----------------------------- | +| `HTTPS_APIZ_SANDBOX_EBAY_COM` | https://apiz.sandbox.ebay.com | +| `HTTPS_APIZ_EBAY_COM` | https://apiz.ebay.com | \ No newline at end of file diff --git a/docs/models/apikey.md b/docs/models/apikey.md index c20d451f..2b4ba696 100644 --- a/docs/models/apikey.md +++ b/docs/models/apikey.md @@ -3,7 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- | -| `apikey` | *str* | :heavy_check_mark: | Mailchimp API Key. See the docs for information on how to generate this key. | -| `auth_type` | [models.SourceMailchimpSchemasAuthType](../models/sourcemailchimpschemasauthtype.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | +| `api_key` | *Optional[str]* | :heavy_minus_sign: | Kit/ConvertKit API Key | +| `auth_type` | [models.SourceConvertkitSchemasAuthType](../models/sourceconvertkitschemasauthtype.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/destinationsnowflakesnowflake.md b/docs/models/appsflyer.md similarity index 54% rename from docs/models/destinationsnowflakesnowflake.md rename to docs/models/appsflyer.md index 5fc7b64d..1b445df0 100644 --- a/docs/models/destinationsnowflakesnowflake.md +++ b/docs/models/appsflyer.md @@ -1,8 +1,8 @@ -# DestinationSnowflakeSnowflake +# Appsflyer ## Values | Name | Value | | ----------- | ----------- | -| `SNOWFLAKE` | snowflake | \ No newline at end of file +| `APPSFLYER` | appsflyer | \ No newline at end of file diff --git a/docs/models/assemblyai.md b/docs/models/assemblyai.md new file mode 100644 index 00000000..0a960ab6 --- /dev/null +++ b/docs/models/assemblyai.md @@ -0,0 +1,8 @@ +# Assemblyai + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `ASSEMBLYAI` | assemblyai | \ No newline at end of file diff --git a/docs/models/authenticateviaclientcredentials.md b/docs/models/authenticateviaclientcredentials.md new file mode 100644 index 00000000..ddc5a634 --- /dev/null +++ b/docs/models/authenticateviaclientcredentials.md @@ -0,0 +1,11 @@ +# AuthenticateViaClientCredentials + + +## Fields + +| Field | Type | Required | Description | +| 
------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | +| `app_client_id` | *str* | :heavy_check_mark: | Client ID of your Microsoft developer application | +| `app_client_secret` | *str* | :heavy_check_mark: | Client Secret of your Microsoft developer application | +| `app_tenant_id` | *str* | :heavy_check_mark: | Tenant ID of the Microsoft Azure Application | +| `auth_type` | [Optional[models.SourceAzureBlobStorageSchemasAuthType]](../models/sourceazureblobstorageschemasauthtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/authenticateviagoogleoauth.md b/docs/models/authenticateviagoogleoauth.md index 4fc88dbc..c03f825a 100644 --- a/docs/models/authenticateviagoogleoauth.md +++ b/docs/models/authenticateviagoogleoauth.md @@ -3,10 +3,9 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -| `access_token` | *str* | :heavy_check_mark: | Access Token | -| `client_id` | *str* | :heavy_check_mark: | Client ID | -| `client_secret` | *str* | :heavy_check_mark: | Client Secret | -| `refresh_token` | *str* | :heavy_check_mark: | Access Token | -| `auth_type` | [Optional[models.SourceGcsAuthType]](../models/sourcegcsauthtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | +| `client_id` | *str* | :heavy_check_mark: | The Client ID of your Google Sheets developer application. | +| `client_secret` | *str* | :heavy_check_mark: | The Client Secret of your Google Sheets developer application. | +| `refresh_token` | *str* | :heavy_check_mark: | The token for obtaining new access token. 
| +| `auth_type` | [Optional[models.DestinationGoogleSheetsAuthType]](../models/destinationgooglesheetsauthtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/authenticateviastorageaccountkey.md b/docs/models/authenticateviastorageaccountkey.md index 369d2ab5..adabf16f 100644 --- a/docs/models/authenticateviastorageaccountkey.md +++ b/docs/models/authenticateviastorageaccountkey.md @@ -3,7 +3,7 @@ ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | -| `azure_blob_storage_account_key` | *str* | :heavy_check_mark: | The Azure blob storage account key. | Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd== | -| `auth_type` | [Optional[models.SourceAzureBlobStorageSchemasAuthType]](../models/sourceazureblobstorageschemasauthtype.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | +| `azure_blob_storage_account_key` | *str* | :heavy_check_mark: | The Azure blob storage account key. 
| Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd== | +| `auth_type` | [Optional[models.SourceAzureBlobStorageSchemasCredentialsAuthType]](../models/sourceazureblobstorageschemascredentialsauthtype.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/authenticationmethod.md b/docs/models/authenticationmethod.md index af737103..5955e919 100644 --- a/docs/models/authenticationmethod.md +++ b/docs/models/authenticationmethod.md @@ -5,10 +5,10 @@ The type of authentication to be used ## Supported Types -### `models.NoneT` +### `models.DestinationElasticsearchNone` ```python -value: models.NoneT = /* values here */ +value: models.DestinationElasticsearchNone = /* values here */ ``` ### `models.APIKeySecret` diff --git a/docs/models/authenticationtype.md b/docs/models/authenticationtype.md index d944ace6..19c9ed41 100644 --- a/docs/models/authenticationtype.md +++ b/docs/models/authenticationtype.md @@ -3,15 +3,15 @@ ## Supported Types -### `models.SourceGoogleSearchConsoleOAuth` +### `models.SourceConvertkitOAuth20` ```python -value: models.SourceGoogleSearchConsoleOAuth = /* values here */ +value: models.SourceConvertkitOAuth20 = /* values here */ ``` -### `models.SourceGoogleSearchConsoleServiceAccountKeyAuthentication` +### `models.APIKey` ```python -value: models.SourceGoogleSearchConsoleServiceAccountKeyAuthentication = /* values here */ +value: models.APIKey = /* values here */ ``` diff --git a/docs/models/authenticationviagoogleoauth.md b/docs/models/authenticationviagoogleoauth.md deleted file mode 100644 index e51dc9f3..00000000 --- a/docs/models/authenticationviagoogleoauth.md +++ /dev/null @@ -1,12 +0,0 @@ -# AuthenticationViaGoogleOAuth - -Google API Credentials for connecting to Google Sheets and Google Drive APIs - - -## Fields - -| Field | Type | Required | Description | -| -------------------------------------------------------------- | -------------------------------------------------------------- | -------------------------------------------------------------- | -------------------------------------------------------------- | -| `client_id` | *str* | :heavy_check_mark: | The Client ID of your Google Sheets developer application. | -| `client_secret` | *str* | :heavy_check_mark: | The Client Secret of your Google Sheets developer application. | -| `refresh_token` | *str* | :heavy_check_mark: | The token for obtaining new access token. 
| \ No newline at end of file diff --git a/docs/models/authorizationmechanism.md b/docs/models/authorizationmechanism.md new file mode 100644 index 00000000..545b61d4 --- /dev/null +++ b/docs/models/authorizationmechanism.md @@ -0,0 +1,17 @@ +# AuthorizationMechanism + + +## Supported Types + +### `models.Td2` + +```python +value: models.Td2 = /* values here */ +``` + +### `models.Ldap` + +```python +value: models.Ldap = /* values here */ +``` + diff --git a/docs/models/aviationstack.md b/docs/models/aviationstack.md new file mode 100644 index 00000000..37d59f56 --- /dev/null +++ b/docs/models/aviationstack.md @@ -0,0 +1,8 @@ +# Aviationstack + + +## Values + +| Name | Value | +| --------------- | --------------- | +| `AVIATIONSTACK` | aviationstack | \ No newline at end of file diff --git a/docs/models/awinadvertiser.md b/docs/models/awinadvertiser.md new file mode 100644 index 00000000..bbc6af13 --- /dev/null +++ b/docs/models/awinadvertiser.md @@ -0,0 +1,8 @@ +# AwinAdvertiser + + +## Values + +| Name | Value | +| ----------------- | ----------------- | +| `AWIN_ADVERTISER` | awin-advertiser | \ No newline at end of file diff --git a/docs/models/azureopenai.md b/docs/models/azureopenai.md index 6e384841..dedd73c8 100644 --- a/docs/models/azureopenai.md +++ b/docs/models/azureopenai.md @@ -5,9 +5,9 @@ Use the Azure-hosted OpenAI API to embed text. This option is using the text-emb ## Fields -| Field | Type | Required | Description | Example | -| ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | -| `api_base` | *str* | :heavy_check_mark: | The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource | https://your-resource-name.openai.azure.com | -| `deployment` | *str* | :heavy_check_mark: | The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource | your-resource-name | -| `openai_key` | *str* | :heavy_check_mark: | The API key for your Azure OpenAI resource. 
You can find this in the Azure portal under your Azure OpenAI resource | | -| `mode` | [Optional[models.DestinationAstraSchemasEmbeddingMode]](../models/destinationastraschemasembeddingmode.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- | +| `api_base` | *str* | :heavy_check_mark: | The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource | https://your-resource-name.openai.azure.com | +| `deployment` | *str* | :heavy_check_mark: | The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource | your-resource-name | +| `openai_key` | *str* | :heavy_check_mark: | The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource | | +| `mode` | [Optional[models.DestinationAstraSchemasEmbeddingEmbeddingMode]](../models/destinationastraschemasembeddingembeddingmode.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/babelforce.md b/docs/models/babelforce.md new file mode 100644 index 00000000..8c1fa084 --- /dev/null +++ b/docs/models/babelforce.md @@ -0,0 +1,8 @@ +# Babelforce + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `BABELFORCE` | babelforce | \ No newline at end of file diff --git a/docs/models/basic.md b/docs/models/basic.md index 7749698f..c9327c7f 100644 --- a/docs/models/basic.md +++ b/docs/models/basic.md @@ -5,5 +5,5 @@ | Field | Type | Required | Description | | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `api_endpoint` | [Optional[models.SourceJotformAPIEndpoint]](../models/sourcejotformapiendpoint.md) | :heavy_minus_sign: | N/A | +| `api_endpoint` | [Optional[models.SourceJotformSchemasAPIEndpoint]](../models/sourcejotformschemasapiendpoint.md) | :heavy_minus_sign: | N/A | | `url_prefix` | 
[Optional[models.BaseURLPrefix]](../models/baseurlprefix.md) | :heavy_minus_sign: | You can access our API through the following URLs - Standard API Usage (Use the default API URL - https://api.jotform.com), For EU (Use the EU API URL - https://eu-api.jotform.com), For HIPAA (Use the HIPAA API URL - https://hipaa-api.jotform.com) | \ No newline at end of file diff --git a/docs/models/batchedstandardinserts.md b/docs/models/batchedstandardinserts.md index 90c0c632..52d7c0e2 100644 --- a/docs/models/batchedstandardinserts.md +++ b/docs/models/batchedstandardinserts.md @@ -5,6 +5,7 @@ Direct loading using batched SQL INSERT statements. This method uses the BigQuer ## Fields -| Field | Type | Required | Description | -| ------------------------------------ | ------------------------------------ | ------------------------------------ | ------------------------------------ | -| `method` | [models.Method](../models/method.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------- | ---------------------------------------------- | ---------------------------------------------- | ---------------------------------------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `method` | [Optional[models.Method]](../models/method.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/bluetally.md b/docs/models/bluetally.md new file mode 100644 index 00000000..796ea92f --- /dev/null +++ b/docs/models/bluetally.md @@ -0,0 +1,8 @@ +# Bluetally + + +## Values + +| Name | Value | +| ----------- | ----------- | +| `BLUETALLY` | bluetally | \ No newline at end of file diff --git a/docs/models/sourceintercomintercom.md b/docs/models/boldsign.md similarity index 57% rename from docs/models/sourceintercomintercom.md rename to docs/models/boldsign.md index 5b16ffdd..331dd279 100644 --- a/docs/models/sourceintercomintercom.md +++ b/docs/models/boldsign.md @@ -1,8 +1,8 @@ -# SourceIntercomIntercom +# Boldsign ## Values | Name | Value | | ---------- | ---------- | -| `INTERCOM` | intercom | \ No newline at end of file +| `BOLDSIGN` | boldsign | \ No newline at end of file diff --git a/docs/models/breezometer.md b/docs/models/breezometer.md new file mode 100644 index 00000000..ca3c9789 --- /dev/null +++ b/docs/models/breezometer.md @@ -0,0 +1,8 @@ +# Breezometer + + +## Values + +| Name | Value | +| ------------- | ------------- | +| `BREEZOMETER` | breezometer | \ No newline at end of file diff --git a/docs/models/bulkload.md b/docs/models/bulkload.md new file mode 100644 index 00000000..effa2075 --- /dev/null +++ b/docs/models/bulkload.md @@ -0,0 +1,17 @@ +# BulkLoad + +Configuration details for using the BULK loading mechanism. 
+ + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `azure_blob_storage_account_name` | *str* | :heavy_check_mark: | The name of the Azure Blob Storage account. See: https://learn.microsoft.com/azure/storage/blobs/storage-blobs-introduction#storage-accounts | mystorageaccount | +| `azure_blob_storage_container_name` | *str* | :heavy_check_mark: | The name of the Azure Blob Storage container. See: https://learn.microsoft.com/azure/storage/blobs/storage-blobs-introduction#containers | mycontainer | +| `bulk_load_data_source` | *str* | :heavy_check_mark: | Specifies the external data source name configured in MSSQL, which references the Azure Blob container. See: https://learn.microsoft.com/sql/t-sql/statements/bulk-insert-transact-sql | MyAzureBlobStorage | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | +| `azure_blob_storage_account_key` | *Optional[str]* | :heavy_minus_sign: | The Azure blob storage account key. Mutually exclusive with a Shared Access Signature | Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd== | +| `bulk_load_validate_values_pre_load` | *Optional[bool]* | :heavy_minus_sign: | When enabled, Airbyte will validate all values before loading them into the destination table. This provides stronger data integrity guarantees but may significantly impact performance. | false | +| `load_type` | [Optional[models.DestinationMssqlLoadType]](../models/destinationmssqlloadtype.md) | :heavy_minus_sign: | N/A | | +| `shared_access_signature` | *Optional[str]* | :heavy_minus_sign: | A shared access signature (SAS) provides secure delegated access to resources in your storage account. 
See: https://learn.microsoft.com/azure/storage/common/storage-sas-overview. Mutually exclusive with an account key | sv=2021-08-06&st=2025-04-11T00%3A00%3A00Z&se=2025-04-12T00%3A00%3A00Z&sr=b&sp=rw&sig=abcdefghijklmnopqrstuvwxyz1234567890%2Fabcdefg%3D |
\ No newline at end of file
diff --git a/docs/models/sourcezendeskchatzendeskchat.md b/docs/models/captaindata.md
similarity index 56%
rename from docs/models/sourcezendeskchatzendeskchat.md
rename to docs/models/captaindata.md
index fa7129a9..73c0c004 100644
--- a/docs/models/sourcezendeskchatzendeskchat.md
+++ b/docs/models/captaindata.md
@@ -1,8 +1,8 @@
-# SourceZendeskChatZendeskChat
+# CaptainData
 
 
 ## Values
 
 | Name | Value |
 | -------------- | -------------- |
-| `ZENDESK_CHAT` | zendesk-chat |
\ No newline at end of file
+| `CAPTAIN_DATA` | captain-data |
\ No newline at end of file
diff --git a/docs/models/catalogtype.md b/docs/models/catalogtype.md
index 0b4ef3e6..7dbfba0d 100644
--- a/docs/models/catalogtype.md
+++ b/docs/models/catalogtype.md
@@ -1,8 +1,25 @@
 # CatalogType
 
+Specifies the type of Iceberg catalog (e.g., NESSIE, GLUE, REST) and its associated configuration.
 
-## Values
-
-| Name | Value |
-| ------ | ------ |
-| `HIVE` | Hive |
\ No newline at end of file
+## Supported Types
+
+### `models.NessieCatalog`
+
+```python
+value: models.NessieCatalog = /* values here */
+```
+
+### `models.GlueCatalog`
+
+```python
+value: models.GlueCatalog = /* values here */
+```
+
+### `models.RestCatalog`
+
+```python
+value: models.RestCatalog = /* values here */
+```
+
diff --git a/docs/models/cdcdeletionmode.md b/docs/models/cdcdeletionmode.md
new file mode 100644
index 00000000..36b04709
--- /dev/null
+++ b/docs/models/cdcdeletionmode.md
@@ -0,0 +1,11 @@
+# CDCDeletionMode
+
+Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes.
+
+
+## Values
+
+| Name | Value |
+| ------------- | ------------- |
+| `HARD_DELETE` | Hard delete |
+| `SOFT_DELETE` | Soft delete |
\ No newline at end of file
diff --git a/docs/models/churnkey.md b/docs/models/churnkey.md
new file mode 100644
index 00000000..f6321f98
--- /dev/null
+++ b/docs/models/churnkey.md
@@ -0,0 +1,8 @@
+# Churnkey
+
+
+## Values
+
+| Name | Value |
+| ---------- | ---------- |
+| `CHURNKEY` | churnkey |
\ No newline at end of file
diff --git a/docs/models/ciscomeraki.md b/docs/models/ciscomeraki.md
new file mode 100644
index 00000000..8c59e8af
--- /dev/null
+++ b/docs/models/ciscomeraki.md
@@ -0,0 +1,8 @@
+# CiscoMeraki
+
+
+## Values
+
+| Name | Value |
+| -------------- | -------------- |
+| `CISCO_MERAKI` | cisco-meraki |
\ No newline at end of file
diff --git a/docs/models/cohere.md b/docs/models/cohere.md
index d38e947a..6b2df091 100644
--- a/docs/models/cohere.md
+++ b/docs/models/cohere.md
@@ -5,7 +5,7 @@ Use the Cohere API to embed text. 
## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -| `cohere_key` | *str* | :heavy_check_mark: | N/A | -| `mode` | [Optional[models.DestinationAstraMode]](../models/destinationastramode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | +| `cohere_key` | *str* | :heavy_check_mark: | N/A | +| `mode` | [Optional[models.DestinationAstraSchemasMode]](../models/destinationastraschemasmode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/coingeckocoins.md b/docs/models/coingeckocoins.md new file mode 100644 index 00000000..fef770f1 --- /dev/null +++ b/docs/models/coingeckocoins.md @@ -0,0 +1,8 @@ +# CoingeckoCoins + + +## Values + +| Name | Value | +| ----------------- | ----------------- | +| `COINGECKO_COINS` | coingecko-coins | \ No newline at end of file diff --git a/docs/models/configuredstreammapper.md b/docs/models/configuredstreammapper.md index e3ea460c..316c24a7 100644 --- a/docs/models/configuredstreammapper.md +++ b/docs/models/configuredstreammapper.md @@ -6,4 +6,5 @@ | Field | Type | Required | Description | | -------------------------------------------------------------- | -------------------------------------------------------------- | -------------------------------------------------------------- | -------------------------------------------------------------- | | `mapper_configuration` | [models.MapperConfiguration](../models/mapperconfiguration.md) | :heavy_check_mark: | The values required to configure the mapper. 
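Since `cohere_key` is the only required field of the Cohere embedding block, construction is a one-liner. A sketch, assuming the SDK imports as in its usage docs (`from airbyte_api import models`) and that the class is exposed as `models.Cohere` per its doc page:

```python
from airbyte_api import models

# Cohere embedding configuration: only the API key is required; the
# `mode` discriminator is filled in by the generated model.
embedding = models.Cohere(
    cohere_key="co-XXXX",  # placeholder Cohere API key
)
```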
| -| `type` | [models.StreamMapperType](../models/streammappertype.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| `type` | [models.StreamMapperType](../models/streammappertype.md) | :heavy_check_mark: | N/A | +| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/connectioncreaterequest.md b/docs/models/connectioncreaterequest.md index 3bc9cdf0..e064a09a 100644 --- a/docs/models/connectioncreaterequest.md +++ b/docs/models/connectioncreaterequest.md @@ -3,16 +3,17 @@ ## Fields -| Field | Type | Required | Description | Example | -| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `destination_id` | *str* | :heavy_check_mark: | N/A | | -| `source_id` | *str* | :heavy_check_mark: | N/A | | -| `configurations` | [Optional[models.StreamConfigurations]](../models/streamconfigurations.md) | :heavy_minus_sign: | A list of configured stream options for a connection. | | -| `data_residency` | [Optional[models.GeographyEnum]](../models/geographyenum.md) | :heavy_minus_sign: | N/A | | -| `name` | *Optional[str]* | :heavy_minus_sign: | Optional name of the connection | | -| `namespace_definition` | [Optional[models.NamespaceDefinitionEnum]](../models/namespacedefinitionenum.md) | :heavy_minus_sign: | Define the location where the data will be stored in the destination | | -| `namespace_format` | *Optional[str]* | :heavy_minus_sign: | Used when namespaceDefinition is 'custom_format'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'. | ${SOURCE_NAMESPACE} | -| `non_breaking_schema_updates_behavior` | [Optional[models.NonBreakingSchemaUpdatesBehaviorEnum]](../models/nonbreakingschemaupdatesbehaviorenum.md) | :heavy_minus_sign: | Set how Airbyte handles syncs when it detects a non-breaking schema change in the source | | -| `prefix` | *Optional[str]* | :heavy_minus_sign: | Prefix that will be prepended to the name of each stream when it is written to the destination (ex. “airbyte_” causes “projects” => “airbyte_projects”). 
| | -| `schedule` | [Optional[models.AirbyteAPIConnectionSchedule]](../models/airbyteapiconnectionschedule.md) | :heavy_minus_sign: | schedule for when the the connection should run, per the schedule type | | -| `status` | [Optional[models.ConnectionStatusEnum]](../models/connectionstatusenum.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `destination_id` | *str* | :heavy_check_mark: | N/A | | +| `source_id` | *str* | :heavy_check_mark: | N/A | | +| `configurations` | [Optional[models.StreamConfigurationsInput]](../models/streamconfigurationsinput.md) | :heavy_minus_sign: | A list of configured stream options for a connection. | | +| ~~`data_residency`~~ | *Optional[str]* | :heavy_minus_sign: | :warning: **DEPRECATED**: We no longer support modifying dataResidency on Community and Enterprise connections. All connections will use the dataResidency of their associated workspace. | | +| `name` | *Optional[str]* | :heavy_minus_sign: | Optional name of the connection | | +| `namespace_definition` | [Optional[models.NamespaceDefinitionEnum]](../models/namespacedefinitionenum.md) | :heavy_minus_sign: | Define the location where the data will be stored in the destination | | +| `namespace_format` | *Optional[str]* | :heavy_minus_sign: | Used when namespaceDefinition is 'custom_format'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'. | ${SOURCE_NAMESPACE} | +| `non_breaking_schema_updates_behavior` | [Optional[models.NonBreakingSchemaUpdatesBehaviorEnum]](../models/nonbreakingschemaupdatesbehaviorenum.md) | :heavy_minus_sign: | Set how Airbyte handles syncs when it detects a non-breaking schema change in the source | | +| `prefix` | *Optional[str]* | :heavy_minus_sign: | Prefix that will be prepended to the name of each stream when it is written to the destination (ex. “airbyte_” causes “projects” => “airbyte_projects”).
| | +| `schedule` | [Optional[models.AirbyteAPIConnectionSchedule]](../models/airbyteapiconnectionschedule.md) | :heavy_minus_sign: | schedule for when the connection should run, per the schedule type | | +| `status` | [Optional[models.ConnectionStatusEnum]](../models/connectionstatusenum.md) | :heavy_minus_sign: | N/A | | +| `tags` | List[[models.Tag](../models/tag.md)] | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/connectionpatchrequest.md b/docs/models/connectionpatchrequest.md index d4ce8bec..47387fc5 100644 --- a/docs/models/connectionpatchrequest.md +++ b/docs/models/connectionpatchrequest.md @@ -3,14 +3,15 @@ ## Fields -| Field | Type | Required | Description | Example | -| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `configurations` | [Optional[models.StreamConfigurations]](../models/streamconfigurations.md) | :heavy_minus_sign: | A list of configured stream options for a connection. | | -| `data_residency` | [Optional[models.GeographyEnumNoDefault]](../models/geographyenumnodefault.md) | :heavy_minus_sign: | N/A | | -| `name` | *Optional[str]* | :heavy_minus_sign: | Optional name of the connection | | -| `namespace_definition` | [Optional[models.NamespaceDefinitionEnumNoDefault]](../models/namespacedefinitionenumnodefault.md) | :heavy_minus_sign: | Define the location where the data will be stored in the destination | | -| `namespace_format` | *Optional[str]* | :heavy_minus_sign: | Used when namespaceDefinition is 'custom_format'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'. | ${SOURCE_NAMESPACE} | -| `non_breaking_schema_updates_behavior` | [Optional[models.NonBreakingSchemaUpdatesBehaviorEnumNoDefault]](../models/nonbreakingschemaupdatesbehaviorenumnodefault.md) | :heavy_minus_sign: | Set how Airbyte handles syncs when it detects a non-breaking schema change in the source | | -| `prefix` | *Optional[str]* | :heavy_minus_sign: | Prefix that will be prepended to the name of each stream when it is written to the destination (ex. “airbyte_” causes “projects” => “airbyte_projects”).
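The create request now takes `StreamConfigurationsInput`, grows an optional `tags` list, and deprecates `data_residency`. A hedged sketch of the new shape, assuming the SDK imports as in its usage docs; IDs and names are placeholders:

```python
from airbyte_api import models

# Create request under the new schema: omit the deprecated
# `data_residency`; the workspace's residency applies automatically.
req = models.ConnectionCreateRequest(
    source_id="00000000-0000-0000-0000-000000000001",
    destination_id="00000000-0000-0000-0000-000000000002",
    name="orders-to-warehouse",
    prefix="airbyte_",  # "projects" stream lands as "airbyte_projects"
    tags=[],  # attach models.Tag objects here; see docs/models/tag.md
)
```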
| | -| `schedule` | [Optional[models.AirbyteAPIConnectionSchedule]](../models/airbyteapiconnectionschedule.md) | :heavy_minus_sign: | schedule for when the the connection should run, per the schedule type | | -| `status` | [Optional[models.ConnectionStatusEnum]](../models/connectionstatusenum.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `configurations` | [Optional[models.StreamConfigurationsInput]](../models/streamconfigurationsinput.md) | :heavy_minus_sign: | A list of configured stream options for a connection. | | +| ~~`data_residency`~~ | *Optional[str]* | :heavy_minus_sign: | :warning: **DEPRECATED**: We no longer support modifying dataResidency on Community and Enterprise connections. All connections will use the dataResidency of their associated workspace. | | +| `name` | *Optional[str]* | :heavy_minus_sign: | Optional name of the connection | | +| `namespace_definition` | [Optional[models.NamespaceDefinitionEnumNoDefault]](../models/namespacedefinitionenumnodefault.md) | :heavy_minus_sign: | Define the location where the data will be stored in the destination | | +| `namespace_format` | *Optional[str]* | :heavy_minus_sign: | Used when namespaceDefinition is 'custom_format'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'. | ${SOURCE_NAMESPACE} | +| `non_breaking_schema_updates_behavior` | [Optional[models.NonBreakingSchemaUpdatesBehaviorEnumNoDefault]](../models/nonbreakingschemaupdatesbehaviorenumnodefault.md) | :heavy_minus_sign: | Set how Airbyte handles syncs when it detects a non-breaking schema change in the source | | +| `prefix` | *Optional[str]* | :heavy_minus_sign: | Prefix that will be prepended to the name of each stream when it is written to the destination (ex. “airbyte_” causes “projects” => “airbyte_projects”).
| | +| `schedule` | [Optional[models.AirbyteAPIConnectionSchedule]](../models/airbyteapiconnectionschedule.md) | :heavy_minus_sign: | schedule for when the connection should run, per the schedule type | | +| `status` | [Optional[models.ConnectionStatusEnum]](../models/connectionstatusenum.md) | :heavy_minus_sign: | N/A | | +| `tags` | List[[models.Tag](../models/tag.md)] | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/connectionresponse.md b/docs/models/connectionresponse.md index 4b9c7765..3751a9df 100644 --- a/docs/models/connectionresponse.md +++ b/docs/models/connectionresponse.md @@ -15,8 +15,8 @@ Provides details of a single connection. | `schedule` | [models.ConnectionScheduleResponse](../models/connectionscheduleresponse.md) | :heavy_check_mark: | schedule for when the the connection should run, per the schedule type | | `source_id` | *str* | :heavy_check_mark: | N/A | | `status` | [models.ConnectionStatusEnum](../models/connectionstatusenum.md) | :heavy_check_mark: | N/A | +| `tags` | List[[models.Tag](../models/tag.md)] | :heavy_check_mark: | N/A | | `workspace_id` | *str* | :heavy_check_mark: | N/A | -| `data_residency` | [Optional[models.GeographyEnum]](../models/geographyenum.md) | :heavy_minus_sign: | N/A | | `namespace_definition` | [Optional[models.NamespaceDefinitionEnum]](../models/namespacedefinitionenum.md) | :heavy_minus_sign: | Define the location where the data will be stored in the destination | | `namespace_format` | *Optional[str]* | :heavy_minus_sign: | N/A | | `non_breaking_schema_updates_behavior` | [Optional[models.NonBreakingSchemaUpdatesBehaviorEnum]](../models/nonbreakingschemaupdatesbehaviorenum.md) | :heavy_minus_sign: | Set how Airbyte handles syncs when it detects a non-breaking schema change in the source | diff --git a/docs/models/connectionsyncmodeenum.md b/docs/models/connectionsyncmodeenum.md index 4da09f9d..1d7e7871 100644 --- a/docs/models/connectionsyncmodeenum.md +++ b/docs/models/connectionsyncmodeenum.md @@ -3,9 +3,14 @@ ## Values -| Name | Value | -| ----------------------------- | ----------------------------- | -| `FULL_REFRESH_OVERWRITE` | full_refresh_overwrite | -| `FULL_REFRESH_APPEND` | full_refresh_append | -| `INCREMENTAL_APPEND` | incremental_append | -| `INCREMENTAL_DEDUPED_HISTORY` | incremental_deduped_history | \ No newline at end of file +| Name | Value | +| -------------------------------- | -------------------------------- | +| `FULL_REFRESH_OVERWRITE` | full_refresh_overwrite | +| `FULL_REFRESH_OVERWRITE_DEDUPED` | full_refresh_overwrite_deduped | +| `FULL_REFRESH_APPEND` | full_refresh_append | +| `FULL_REFRESH_UPDATE` | full_refresh_update | +| `FULL_REFRESH_SOFT_DELETE` | full_refresh_soft_delete | +| `INCREMENTAL_APPEND` | incremental_append | +| `INCREMENTAL_DEDUPED_HISTORY` | incremental_deduped_history | +| `INCREMENTAL_UPDATE` | incremental_update | +| `INCREMENTAL_SOFT_DELETE` | incremental_soft_delete | \ No newline at end of file diff --git a/docs/models/copyrawfiles.md b/docs/models/copyrawfiles.md index f4dcfb81..01b72fc3 100644 --- a/docs/models/copyrawfiles.md +++ b/docs/models/copyrawfiles.md @@ -5,6 +5,7 @@ Copy raw files without parsing their contents. Bits are copied into the destinat
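`ConnectionPatchRequest` follows the same pattern with every field optional, and `ConnectionSyncModeEnum` gains deduped-overwrite, update, and soft-delete variants. A sketch of a minimal patch; the `ACTIVE` member name is an assumption based on the enum's documented purpose, while the sync-mode member comes straight from the table above:

```python
from airbyte_api import models

# Patch only what should change; omitted fields are left untouched.
patch = models.ConnectionPatchRequest(
    name="orders-to-warehouse-v2",
    status=models.ConnectionStatusEnum.ACTIVE,  # assumed member name
)

# One of the newly added sync-mode variants:
mode = models.ConnectionSyncModeEnum.INCREMENTAL_SOFT_DELETE
```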
Bits are copied into the destinat ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -| `delivery_type` | [Optional[models.SourceS3DeliveryType]](../models/sources3deliverytype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `delivery_type` | [Optional[models.SourceGoogleDriveDeliveryType]](../models/sourcegoogledrivedeliverytype.md) | :heavy_minus_sign: | N/A | +| `preserve_directory_structure` | *Optional[bool]* | :heavy_minus_sign: | If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. 
| \ No newline at end of file diff --git a/docs/models/couchbase.md b/docs/models/couchbase.md new file mode 100644 index 00000000..2a14a203 --- /dev/null +++ b/docs/models/couchbase.md @@ -0,0 +1,8 @@ +# Couchbase + + +## Values + +| Name | Value | +| ----------- | ----------- | +| `COUCHBASE` | couchbase | \ No newline at end of file diff --git a/docs/models/createdeclarativesourcedefinitionrequest.md b/docs/models/createdeclarativesourcedefinitionrequest.md new file mode 100644 index 00000000..e7b748d2 --- /dev/null +++ b/docs/models/createdeclarativesourcedefinitionrequest.md @@ -0,0 +1,9 @@ +# CreateDeclarativeSourceDefinitionRequest + + +## Fields + +| Field | Type | Required | Description | +| --------------------------------- | --------------------------------- | --------------------------------- | --------------------------------- | +| `manifest` | *Any* | :heavy_check_mark: | Low code CDK manifest JSON object | +| `name` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/createdefinitionrequest.md b/docs/models/createdefinitionrequest.md new file mode 100644 index 00000000..bbb0ea0d --- /dev/null +++ b/docs/models/createdefinitionrequest.md @@ -0,0 +1,11 @@ +# CreateDefinitionRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------- | ------------------- | ------------------- | ------------------- | +| `docker_image_tag` | *str* | :heavy_check_mark: | N/A | +| `docker_repository` | *str* | :heavy_check_mark: | N/A | +| `name` | *str* | :heavy_check_mark: | N/A | +| `documentation_url` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/credentials.md b/docs/models/credentials.md index 5dfcdf0d..cf947296 100644 --- a/docs/models/credentials.md +++ b/docs/models/credentials.md @@ -3,7 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| ----------------------------------------------------- | ----------------------------------------------------- | ----------------------------------------------------- | ----------------------------------------------------- | -| `client_id` | *Optional[str]* | :heavy_minus_sign: | The client ID of the Airtable developer application. | -| `client_secret` | *Optional[str]* | :heavy_minus_sign: | The client secret the Airtable developer application. | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | +| `client_id` | *Optional[str]* | :heavy_minus_sign: | The client ID of the Airtable developer application. | +| `client_secret` | *Optional[str]* | :heavy_minus_sign: | The client secret of the Airtable developer application. 
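`CreateDeclarativeSourceDefinitionRequest.manifest` is typed `Any` and carries the low-code CDK manifest as a JSON object, not a serialized string. A sketch with a skeletal placeholder manifest (real manifests are produced by the connector builder):

```python
from airbyte_api import models

# Register a declarative source definition; the manifest dict below is a
# placeholder skeleton, not a working connector manifest.
manifest = {
    "version": "0.1.0",
    "streams": [],
    "check": {"type": "CheckStream", "stream_names": []},
}

req = models.CreateDeclarativeSourceDefinitionRequest(
    name="my-declarative-source",
    manifest=manifest,
)
```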
| \ No newline at end of file diff --git a/docs/models/csvcommaseparatedvalues.md b/docs/models/csvcommaseparatedvalues.md index 51bbbecd..21091552 100644 --- a/docs/models/csvcommaseparatedvalues.md +++ b/docs/models/csvcommaseparatedvalues.md @@ -3,8 +3,8 @@ ## Fields -| Field | Type | Required | Description | -| ----------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------- | -| `file_extension` | *Optional[bool]* | :heavy_minus_sign: | Add file extensions to the output file. | -| `flattening` | [Optional[models.NormalizationFlattening]](../models/normalizationflattening.md) | :heavy_minus_sign: | Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details. | -| `format_type` | [models.FormatType](../models/formattype.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------ | ------------------------------------------------------ | ------------------------------------------------------ | ------------------------------------------------------ | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `flattening` | [Optional[models.Flattening]](../models/flattening.md) | :heavy_minus_sign: | N/A | +| `format_type` | [Optional[models.FormatType]](../models/formattype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/cursormethod.md b/docs/models/cursormethod.md new file mode 100644 index 00000000..12e56cfe --- /dev/null +++ b/docs/models/cursormethod.md @@ -0,0 +1,8 @@ +# CursorMethod + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `USER_DEFINED` | user_defined | \ No newline at end of file diff --git a/docs/models/customerly.md b/docs/models/customerly.md new file mode 100644 index 00000000..acfdac3a --- /dev/null +++ b/docs/models/customerly.md @@ -0,0 +1,8 @@ +# Customerly + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `CUSTOMERLY` | customerly | \ No newline at end of file diff --git a/docs/models/datasetlocation.md b/docs/models/datasetlocation.md index 4c86f109..dc4bc593 100644 --- a/docs/models/datasetlocation.md +++ b/docs/models/datasetlocation.md @@ -7,8 +7,9 @@ The location of the dataset. Warning: Changes made after creation will not be ap | Name | Value | | ------------------------- | ------------------------- | -| `US` | US | | `EU` | EU | +| `US` | US | +| `AFRICA_SOUTH1` | africa-south1 | | `ASIA_EAST1` | asia-east1 | | `ASIA_EAST2` | asia-east2 | | `ASIA_NORTHEAST1` | asia-northeast1 | @@ -20,30 +21,29 @@ The location of the dataset. 
Warning: Changes made after creation will not be ap | `ASIA_SOUTHEAST2` | asia-southeast2 | | `AUSTRALIA_SOUTHEAST1` | australia-southeast1 | | `AUSTRALIA_SOUTHEAST2` | australia-southeast2 | -| `EUROPE_CENTRAL1` | europe-central1 | | `EUROPE_CENTRAL2` | europe-central2 | | `EUROPE_NORTH1` | europe-north1 | +| `EUROPE_NORTH2` | europe-north2 | | `EUROPE_SOUTHWEST1` | europe-southwest1 | | `EUROPE_WEST1` | europe-west1 | | `EUROPE_WEST2` | europe-west2 | | `EUROPE_WEST3` | europe-west3 | | `EUROPE_WEST4` | europe-west4 | | `EUROPE_WEST6` | europe-west6 | -| `EUROPE_WEST7` | europe-west7 | | `EUROPE_WEST8` | europe-west8 | | `EUROPE_WEST9` | europe-west9 | +| `EUROPE_WEST10` | europe-west10 | | `EUROPE_WEST12` | europe-west12 | | `ME_CENTRAL1` | me-central1 | | `ME_CENTRAL2` | me-central2 | | `ME_WEST1` | me-west1 | | `NORTHAMERICA_NORTHEAST1` | northamerica-northeast1 | | `NORTHAMERICA_NORTHEAST2` | northamerica-northeast2 | +| `NORTHAMERICA_SOUTH1` | northamerica-south1 | | `SOUTHAMERICA_EAST1` | southamerica-east1 | | `SOUTHAMERICA_WEST1` | southamerica-west1 | | `US_CENTRAL1` | us-central1 | | `US_EAST1` | us-east1 | -| `US_EAST2` | us-east2 | -| `US_EAST3` | us-east3 | | `US_EAST4` | us-east4 | | `US_EAST5` | us-east5 | | `US_SOUTH1` | us-south1 | diff --git a/docs/models/days.md b/docs/models/days.md new file mode 100644 index 00000000..7092badc --- /dev/null +++ b/docs/models/days.md @@ -0,0 +1,18 @@ +# Days + +The number of days of data for market chart. + + + +## Values + +| Name | Value | +| ------------------------------ | ------------------------------ | +| `ONE` | 1 | +| `SEVEN` | 7 | +| `FOURTEEN` | 14 | +| `THIRTY` | 30 | +| `NINETY` | 90 | +| `ONE_HUNDRED_AND_EIGHTY` | 180 | +| `THREE_HUNDRED_AND_SIXTY_FIVE` | 365 | +| `MAX` | max | \ No newline at end of file diff --git a/docs/models/declarativesourcedefinitionresponse.md b/docs/models/declarativesourcedefinitionresponse.md new file mode 100644 index 00000000..b603b1be --- /dev/null +++ b/docs/models/declarativesourcedefinitionresponse.md @@ -0,0 +1,11 @@ +# DeclarativeSourceDefinitionResponse + + +## Fields + +| Field | Type | Required | Description | +| --------------------------------- | --------------------------------- | --------------------------------- | --------------------------------- | +| `id` | *str* | :heavy_check_mark: | N/A | +| `manifest` | *Any* | :heavy_check_mark: | Low code CDK manifest JSON object | +| `name` | *str* | :heavy_check_mark: | N/A | +| `version` | *int* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/declarativesourcedefinitionsresponse.md b/docs/models/declarativesourcedefinitionsresponse.md new file mode 100644 index 00000000..98deb6db --- /dev/null +++ b/docs/models/declarativesourcedefinitionsresponse.md @@ -0,0 +1,10 @@ +# DeclarativeSourceDefinitionsResponse + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | +| `data` | List[[models.DeclarativeSourceDefinitionResponse](../models/declarativesourcedefinitionresponse.md)] | :heavy_check_mark: | N/A | +| `next` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `previous` | *Optional[str]* | 
:heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/s3glue.md b/docs/models/deepset.md similarity index 65% rename from docs/models/s3glue.md rename to docs/models/deepset.md index 68648f3a..7bbf6e73 100644 --- a/docs/models/s3glue.md +++ b/docs/models/deepset.md @@ -1,8 +1,8 @@ -# S3Glue +# Deepset ## Values | Name | Value | | --------- | --------- | -| `S3_GLUE` | s3-glue | \ No newline at end of file +| `DEEPSET` | deepset | \ No newline at end of file diff --git a/docs/models/definitionofconversioncountinreports.md b/docs/models/definitionofconversioncountinreports.md new file mode 100644 index 00000000..25811175 --- /dev/null +++ b/docs/models/definitionofconversioncountinreports.md @@ -0,0 +1,11 @@ +# DefinitionOfConversionCountInReports + +The definition of conversion count in reports. See the docs. + + +## Values + +| Name | Value | +| ----------------- | ----------------- | +| `CLICK_VIEW_TIME` | click/view_time | +| `CONVERSION_TIME` | conversion_time | \ No newline at end of file diff --git a/docs/models/definitionresponse.md b/docs/models/definitionresponse.md new file mode 100644 index 00000000..60f73e5b --- /dev/null +++ b/docs/models/definitionresponse.md @@ -0,0 +1,14 @@ +# DefinitionResponse + +Provides details of a single connector definition. + + +## Fields + +| Field | Type | Required | Description | +| ------------------- | ------------------- | ------------------- | ------------------- | +| `docker_image_tag` | *str* | :heavy_check_mark: | N/A | +| `docker_repository` | *str* | :heavy_check_mark: | N/A | +| `id` | *str* | :heavy_check_mark: | N/A | +| `name` | *str* | :heavy_check_mark: | N/A | +| `documentation_url` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/definitionsresponse.md b/docs/models/definitionsresponse.md new file mode 100644 index 00000000..124e097c --- /dev/null +++ b/docs/models/definitionsresponse.md @@ -0,0 +1,10 @@ +# DefinitionsResponse + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------ | ------------------------------------------------------------------ | ------------------------------------------------------------------ | ------------------------------------------------------------------ | +| `data` | List[[models.DefinitionResponse](../models/definitionresponse.md)] | :heavy_check_mark: | N/A | +| `next` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `previous` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/deliverymethod.md b/docs/models/deliverymethod.md index 01a9897f..48c7cc95 100644 --- a/docs/models/deliverymethod.md +++ b/docs/models/deliverymethod.md @@ -15,3 +15,9 @@ value: models.ReplicateRecords = /* values here */ value: models.CopyRawFiles = /* values here */ ``` +### `models.ReplicatePermissionsACL` + +```python +value: models.ReplicatePermissionsACL = /* values here */ +``` + diff --git a/docs/models/destinationastramode.md b/docs/models/destinationastramode.md index 036a5804..e0881dfb 100644 --- a/docs/models/destinationastramode.md +++ b/docs/models/destinationastramode.md @@ -5,4 +5,4 @@ | Name | Value | | -------- | -------- | -| `COHERE` | cohere | \ No newline at end of file +| `OPENAI` | openai | \ No newline at end of file diff --git a/docs/models/destinationastraschemasembeddingembedding1mode.md b/docs/models/destinationastraschemasembeddingembedding1mode.md deleted file mode 100644 index 
b2c17679..00000000 --- a/docs/models/destinationastraschemasembeddingembedding1mode.md +++ /dev/null @@ -1,8 +0,0 @@ -# DestinationAstraSchemasEmbeddingEmbedding1Mode - - -## Values - -| Name | Value | -| -------- | -------- | -| `OPENAI` | openai | \ No newline at end of file diff --git a/docs/models/destinationastraschemasembeddingembedding5mode.md b/docs/models/destinationastraschemasembeddingembedding5mode.md new file mode 100644 index 00000000..e0829811 --- /dev/null +++ b/docs/models/destinationastraschemasembeddingembedding5mode.md @@ -0,0 +1,8 @@ +# DestinationAstraSchemasEmbeddingEmbedding5Mode + + +## Values + +| Name | Value | +| ------------------- | ------------------- | +| `OPENAI_COMPATIBLE` | openai_compatible | \ No newline at end of file diff --git a/docs/models/destinationastraschemasembeddingembeddingmode.md b/docs/models/destinationastraschemasembeddingembeddingmode.md index 9feeb8dc..0baed737 100644 --- a/docs/models/destinationastraschemasembeddingembeddingmode.md +++ b/docs/models/destinationastraschemasembeddingembeddingmode.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ------------------- | ------------------- | -| `OPENAI_COMPATIBLE` | openai_compatible | \ No newline at end of file +| Name | Value | +| -------------- | -------------- | +| `AZURE_OPENAI` | azure_openai | \ No newline at end of file diff --git a/docs/models/destinationastraschemasembeddingmode.md b/docs/models/destinationastraschemasembeddingmode.md index edbb46b4..70c448aa 100644 --- a/docs/models/destinationastraschemasembeddingmode.md +++ b/docs/models/destinationastraschemasembeddingmode.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| -------------- | -------------- | -| `AZURE_OPENAI` | azure_openai | \ No newline at end of file +| Name | Value | +| ------ | ------ | +| `FAKE` | fake | \ No newline at end of file diff --git a/docs/models/destinationastraschemasmode.md b/docs/models/destinationastraschemasmode.md index b83aed3d..a36f100f 100644 --- a/docs/models/destinationastraschemasmode.md +++ b/docs/models/destinationastraschemasmode.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ------ | ------ | -| `FAKE` | fake | \ No newline at end of file +| Name | Value | +| -------- | -------- | +| `COHERE` | cohere | \ No newline at end of file diff --git a/docs/models/destinationazureblobstorage.md b/docs/models/destinationazureblobstorage.md index c1a47745..04bbc820 100644 --- a/docs/models/destinationazureblobstorage.md +++ b/docs/models/destinationazureblobstorage.md @@ -3,13 +3,13 @@ ## Fields -| Field | Type | Required | Description | Example | -| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
-| `azure_blob_storage_account_key` | *str* | :heavy_check_mark: | The Azure blob storage account key. | Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd== | -| `azure_blob_storage_account_name` | *str* | :heavy_check_mark: | The account's name of the Azure Blob Storage. | airbyte5storage | -| `format` | [models.OutputFormat](../models/outputformat.md) | :heavy_check_mark: | Output data format | | -| `azure_blob_storage_container_name` | *Optional[str]* | :heavy_minus_sign: | The name of the Azure blob storage container. If not exists - will be created automatically. May be empty, then will be created automatically airbytecontainer+timestamp | airbytetescontainername | -| `azure_blob_storage_endpoint_domain_name` | *Optional[str]* | :heavy_minus_sign: | This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example. | blob.core.windows.net | -| `azure_blob_storage_output_buffer_size` | *Optional[int]* | :heavy_minus_sign: | The amount of megabytes to buffer for the output stream to Azure. This will impact memory footprint on workers, but may need adjustment for performance and appropriate block size in Azure. | 5 | -| `azure_blob_storage_spill_size` | *Optional[int]* | :heavy_minus_sign: | The amount of megabytes after which the connector should spill the records in a new blob object. Make sure to configure size greater than individual records. Enter 0 if not applicable | 500 | -| `destination_type` | [models.DestinationAzureBlobStorageAzureBlobStorage](../models/destinationazureblobstorageazureblobstorage.md) | :heavy_check_mark: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `azure_blob_storage_account_name` | *str* | :heavy_check_mark: | The name of the Azure Blob Storage Account. Read more here. | mystorageaccount | +| `azure_blob_storage_container_name` | *str* | :heavy_check_mark: | The name of the Azure Blob Storage Container. Read more here. | mycontainer | +| `format` | [models.OutputFormat](../models/outputformat.md) | :heavy_check_mark: | Format of the data output. | | +| `azure_blob_storage_account_key` | *Optional[str]* | :heavy_minus_sign: | The Azure blob storage account key. If you set this value, you must not set the Shared Access Signature. | Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd== | +| `azure_blob_storage_endpoint_domain_name` | *Optional[str]* | :heavy_minus_sign: | The Azure Blob Storage endpoint domain name. Leave the default value (or leave it empty when running the container from the command line) to use the native Microsoft endpoint. | | +| `azure_blob_storage_spill_size` | *Optional[int]* | :heavy_minus_sign: | The amount of megabytes after which the connector should spill the records in a new blob object. Make sure to configure size greater than individual records. Enter 0 if not applicable. | | +| `destination_type` | [models.DestinationAzureBlobStorageAzureBlobStorage](../models/destinationazureblobstorageazureblobstorage.md) | :heavy_check_mark: | N/A | | +| `shared_access_signature` | *Optional[str]* | :heavy_minus_sign: | A shared access signature (SAS) provides secure delegated access to resources in your storage account. Read more here. If you set this value, you must not set the account key. | sv=2021-08-06&st=2025-04-11T00%3A00%3A00Z&se=2025-04-12T00%3A00%3A00Z&sr=b&sp=rw&sig=abcdefghijklmnopqrstuvwxyz1234567890%2Fabcdefg%3D | \ No newline at end of file diff --git a/docs/models/normalizationflattening.md b/docs/models/destinationazureblobstorageflattening.md similarity index 61% rename from docs/models/normalizationflattening.md rename to docs/models/destinationazureblobstorageflattening.md index 3a1c2086..fdd40f9b 100644 --- a/docs/models/normalizationflattening.md +++ b/docs/models/destinationazureblobstorageflattening.md @@ -1,6 +1,4 @@ -# NormalizationFlattening - -Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
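As with the MSSQL bulk-load options, `azure_blob_storage_account_key` and `shared_access_signature` are mutually exclusive on the reworked `DestinationAzureBlobStorage`. A sketch using SAS auth; the CSV format class name is assumed from its doc page, and all of its fields are now optional:

```python
from airbyte_api import models

# Azure Blob destination with SAS auth. Setting the SAS means the
# account key must be left unset, and vice versa.
destination = models.DestinationAzureBlobStorage(
    azure_blob_storage_account_name="mystorageaccount",
    azure_blob_storage_container_name="mycontainer",
    format=models.CSVCommaSeparatedValues(),  # assumed class name
    shared_access_signature="sv=2021-08-06&...&sig=...",  # placeholder SAS
)
```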
+# DestinationAzureBlobStorageFlattening ## Values diff --git a/docs/models/destinationazureblobstoragejsonlinesnewlinedelimitedjson.md b/docs/models/destinationazureblobstoragejsonlinesnewlinedelimitedjson.md index 76cda9c8..31657f45 100644 --- a/docs/models/destinationazureblobstoragejsonlinesnewlinedelimitedjson.md +++ b/docs/models/destinationazureblobstoragejsonlinesnewlinedelimitedjson.md @@ -3,7 +3,8 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -| `file_extension` | *Optional[bool]* | :heavy_minus_sign: | Add file extensions to the output file. | -| `format_type` | [models.DestinationAzureBlobStorageFormatType](../models/destinationazureblobstorageformattype.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `flattening` | [Optional[models.DestinationAzureBlobStorageFlattening]](../models/destinationazureblobstorageflattening.md) | :heavy_minus_sign: | N/A | +| `format_type` | [Optional[models.DestinationAzureBlobStorageFormatType]](../models/destinationazureblobstorageformattype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinationbigquery.md b/docs/models/destinationbigquery.md index 4d3f3a68..cb70ef5a 100644 --- a/docs/models/destinationbigquery.md +++ b/docs/models/destinationbigquery.md @@ -3,15 +3,14 @@ ## Fields -| Field | Type | Required | Description | Example | -| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `dataset_id` | *str* | :heavy_check_mark: | The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here. | | -| `dataset_location` | [models.DatasetLocation](../models/datasetlocation.md) | :heavy_check_mark: | The location of the dataset. Warning: Changes made after creation will not be applied. Read more here. | | -| `project_id` | *str* | :heavy_check_mark: | The GCP project ID for the project containing the target BigQuery dataset. Read more here. | | -| `big_query_client_buffer_size_mb` | *Optional[int]* | :heavy_minus_sign: | Google BigQuery client's chunk (buffer) size (MIN=1, MAX = 15) for each table. The size that will be written by a single RPC. Written data will be buffered and only flushed upon reaching this size or closing the channel. The default 15MB value is used if not set explicitly. Read more here. 
| 15 | -| `credentials_json` | *Optional[str]* | :heavy_minus_sign: | The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty. | | -| `destination_type` | [models.Bigquery](../models/bigquery.md) | :heavy_check_mark: | N/A | | -| `disable_type_dedupe` | *Optional[bool]* | :heavy_minus_sign: | Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions | | -| `loading_method` | [Optional[models.LoadingMethod]](../models/loadingmethod.md) | :heavy_minus_sign: | The way data will be uploaded to BigQuery. | | -| `raw_data_dataset` | *Optional[str]* | :heavy_minus_sign: | The dataset to write raw tables into (default: airbyte_internal) | | -| `transformation_priority` | [Optional[models.TransformationQueryRunType]](../models/transformationqueryruntype.md) | :heavy_minus_sign: | Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default "interactive" value is used if not set explicitly. | | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `dataset_id` | *str* | :heavy_check_mark: | The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here. | +| `dataset_location` | [models.DatasetLocation](../models/datasetlocation.md) | :heavy_check_mark: | The location of the dataset. Warning: Changes made after creation will not be applied. Read more here. | +| `project_id` | *str* | :heavy_check_mark: | The GCP project ID for the project containing the target BigQuery dataset. Read more here. | +| `cdc_deletion_mode` | [Optional[models.CDCDeletionMode]](../models/cdcdeletionmode.md) | :heavy_minus_sign: | Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. 
| +| `credentials_json` | *Optional[str]* | :heavy_minus_sign: | The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty. | +| `destination_type` | [models.Bigquery](../models/bigquery.md) | :heavy_check_mark: | N/A | +| `disable_type_dedupe` | *Optional[bool]* | :heavy_minus_sign: | Write the legacy "raw tables" format, to enable backwards compatibility with older versions of this connector. | +| `loading_method` | [Optional[models.LoadingMethod]](../models/loadingmethod.md) | :heavy_minus_sign: | The way data will be uploaded to BigQuery. | +| `raw_data_dataset` | *Optional[str]* | :heavy_minus_sign: | Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyte_internal". | \ No newline at end of file diff --git a/docs/models/destinationbigqueryhmackey.md b/docs/models/destinationbigqueryhmackey.md index a32b60f2..ed15ca54 100644 --- a/docs/models/destinationbigqueryhmackey.md +++ b/docs/models/destinationbigqueryhmackey.md @@ -7,4 +7,5 @@ | --------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------- | | `hmac_key_access_id` | *str* | :heavy_check_mark: | HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long. | 1234567890abcdefghij1234 | | `hmac_key_secret` | *str* | :heavy_check_mark: | The corresponding secret for the access ID. It is a 40-character base-64 encoded string. 
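The reworked BigQuery destination drops `transformation_priority` and the client buffer-size knob and adds `cdc_deletion_mode`, whose members are documented above. A sketch with placeholder project and dataset names, assuming the SDK imports as in its usage docs:

```python
from airbyte_api import models

# BigQuery destination opting into soft deletes: source deletions leave
# tombstone records instead of being propagated as hard deletes.
destination = models.DestinationBigquery(
    project_id="my-gcp-project",
    dataset_id="analytics",
    dataset_location=models.DatasetLocation.US,
    cdc_deletion_mode=models.CDCDeletionMode.SOFT_DELETE,
    raw_data_dataset="airbyte_internal",  # the documented default
)
```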
| 1234567890abcdefghij1234567890ABCDEFGHIJ | -| `credential_type` | [models.DestinationBigqueryCredentialType](../models/destinationbigquerycredentialtype.md) | :heavy_check_mark: | N/A | | \ No newline at end of file +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | +| `credential_type` | [Optional[models.DestinationBigqueryCredentialType]](../models/destinationbigquerycredentialtype.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/destinationclickhouse.md b/docs/models/destinationclickhouse.md index d52d9270..31d8d803 100644 --- a/docs/models/destinationclickhouse.md +++ b/docs/models/destinationclickhouse.md @@ -3,15 +3,15 @@ ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `database` | *str* | :heavy_check_mark: | Name of the database. | | -| `host` | *str* | :heavy_check_mark: | Hostname of the database. | | -| `username` | *str* | :heavy_check_mark: | Username to use to access the database. | | -| `destination_type` | [models.Clickhouse](../models/clickhouse.md) | :heavy_check_mark: | N/A | | -| `jdbc_url_params` | *Optional[str]* | :heavy_minus_sign: | Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). | | -| `password` | *Optional[str]* | :heavy_minus_sign: | Password associated with the username. | | -| `port` | *Optional[int]* | :heavy_minus_sign: | HTTP port of the database. | 8123 | -| `raw_data_schema` | *Optional[str]* | :heavy_minus_sign: | The schema to write raw tables into (default: airbyte_internal) | | -| `ssl` | *Optional[bool]* | :heavy_minus_sign: | Encrypt data using SSL. | | -| `tunnel_method` | [Optional[models.SSHTunnelMethod]](../models/sshtunnelmethod.md) | :heavy_minus_sign: | Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. 
| | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------ | ------------------------------ | ------------------------------ | ------------------------------ | +| `host` | *str* | :heavy_check_mark: | Hostname of the database. | +| `password` | *str* | :heavy_check_mark: | Password associated with the username. | +| `database` | *Optional[str]* | :heavy_minus_sign: | Name of the database. | +| `destination_type` | [models.Clickhouse](../models/clickhouse.md) | :heavy_check_mark: | N/A | +| `enable_json` | *Optional[bool]* | :heavy_minus_sign: | Use the JSON type for Object fields. If disabled, the JSON will be converted to a string. | +| `port` | *Optional[str]* | :heavy_minus_sign: | HTTP port of the database. Defaults: HTTP 8123, HTTPS 8443 | +| `protocol` | [Optional[models.Protocol]](../models/protocol.md) | :heavy_minus_sign: | Protocol for the database connection string. | +| `record_window_size` | *Optional[int]* | :heavy_minus_sign: | The maximum number of records that should be written to a batch. Warning: tuning this parameter can impact performance. The batch size is still limited to 70 MB. | +| `tunnel_method` | [Optional[models.SSHTunnelMethod]](../models/sshtunnelmethod.md) | :heavy_minus_sign: | Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. | +| `username` | *Optional[str]* | :heavy_minus_sign: | Username to use to access the database.
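The reworked ClickHouse table above is a breaking change: `password` becomes required, `username` and `database` become optional, `port` is now a string, and the v1 `jdbc_url_params`/`ssl`/`raw_data_schema` fields are replaced by `protocol`, `enable_json`, and `record_window_size`. A minimal sketch of the new shape, assuming the SDK's usual `from airbyte_api import models` import and that `destination_type` is a generated constant (all values are placeholders):

```python
from airbyte_api import models

# New-style ClickHouse destination configuration; destination_type is a
# generated constant and does not need to be passed explicitly.
clickhouse_config = models.DestinationClickhouse(
    host="clickhouse.internal.example.com",  # placeholder hostname
    password="REPLACE_ME",                   # required in the new schema
    username="airbyte_user",                 # optional as of this version
    database="analytics",
    port="8443",                             # now a string; HTTPS defaults to 8443
    enable_json=True,                        # keep Object fields as JSON, not strings
)
```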
| \ No newline at end of file diff --git a/docs/models/destinationclickhouseschemastunnelmethod.md b/docs/models/destinationclickhouseschemastunnelmethod.md index 747b5691..e72e5fe0 100644 --- a/docs/models/destinationclickhouseschemastunnelmethod.md +++ b/docs/models/destinationclickhouseschemastunnelmethod.md @@ -1,7 +1,5 @@ # DestinationClickhouseSchemasTunnelMethod -Connect through a jump server tunnel host using username and password authentication - ## Values diff --git a/docs/models/destinationclickhousetunnelmethod.md b/docs/models/destinationclickhousetunnelmethod.md index 1eda5de5..4a4eabfc 100644 --- a/docs/models/destinationclickhousetunnelmethod.md +++ b/docs/models/destinationclickhousetunnelmethod.md @@ -1,7 +1,5 @@ # DestinationClickhouseTunnelMethod -Connect through a jump server tunnel host using username and ssh key - ## Values diff --git a/docs/models/destinationconfiguration.md b/docs/models/destinationconfiguration.md index db412f68..1212bc8a 100644 --- a/docs/models/destinationconfiguration.md +++ b/docs/models/destinationconfiguration.md @@ -47,12 +47,24 @@ value: models.DestinationClickhouse = /* values here */ value: models.DestinationConvex = /* values here */ ``` +### `models.DestinationCustomerIo` + +```python +value: models.DestinationCustomerIo = /* values here */ +``` + ### `models.DestinationDatabricks` ```python value: models.DestinationDatabricks = /* values here */ ``` +### `models.DestinationDeepset` + +```python +value: models.DestinationDeepset = /* values here */ +``` + ### `models.DestinationDevNull` ```python @@ -95,10 +107,10 @@ value: models.DestinationFirestore = /* values here */ value: models.DestinationGcs = /* values here */ ``` -### `models.DestinationIceberg` +### `models.DestinationHubspot` ```python -value: models.DestinationIceberg = /* values here */ +value: models.DestinationHubspot = /* values here */ ``` ### `models.DestinationMilvus` @@ -125,6 +137,12 @@ value: models.DestinationMotherduck = /* values here */ value: models.DestinationMssql = /* values here */ ``` +### `models.DestinationMssqlV2` + +```python +value: models.DestinationMssqlV2 = /* values here */ +``` + ### `models.DestinationMysql` ```python @@ -185,10 +203,16 @@ value: models.DestinationRedshift = /* values here */ value: models.DestinationS3 = /* values here */ ``` -### `models.DestinationS3Glue` +### `models.DestinationS3DataLake` + +```python +value: models.DestinationS3DataLake = /* values here */ +``` + +### `models.DestinationSalesforce` ```python -value: models.DestinationS3Glue = /* values here */ +value: models.DestinationSalesforce = /* values here */ ``` ### `models.DestinationSftpJSON` @@ -209,6 +233,12 @@ value: models.DestinationSnowflake = /* values here */ value: models.DestinationSnowflakeCortex = /* values here */ ``` +### `models.DestinationSurrealdb` + +```python +value: models.DestinationSurrealdb = /* values here */ +``` + ### `models.DestinationTeradata` ```python diff --git a/docs/models/destinationcreaterequest.md b/docs/models/destinationcreaterequest.md index 403ebebd..8607696f 100644 --- a/docs/models/destinationcreaterequest.md +++ b/docs/models/destinationcreaterequest.md @@ -3,9 +3,10 @@ ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | 
------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | -| `configuration` | [models.DestinationConfiguration](../models/destinationconfiguration.md) | :heavy_check_mark: | The values required to configure the destination. | {
"user": "charles"
} | -| `name` | *str* | :heavy_check_mark: | Name of the destination e.g. dev-mysql-instance. | | -| `workspace_id` | *str* | :heavy_check_mark: | N/A | | -| `definition_id` | *Optional[str]* | :heavy_minus_sign: | The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `configuration` | [models.DestinationConfiguration](../models/destinationconfiguration.md) | :heavy_check_mark: | The values required to configure the destination. | {
"user": "charles"
} | +| `name` | *str* | :heavy_check_mark: | Name of the destination e.g. dev-mysql-instance. | | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | | +| `definition_id` | *Optional[str]* | :heavy_minus_sign: | The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. | | +| `resource_allocation` | [Optional[models.ScopedResourceRequirements]](../models/scopedresourcerequirements.md) | :heavy_minus_sign: | Actor- or actor-definition-specific resource requirements. If default is set, these are the requirements applied to ALL jobs run for this actor definition; they are overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values are overridden by configuration at the connection level. | | \ No newline at end of file diff --git a/docs/models/destinationcustomerio.md b/docs/models/destinationcustomerio.md new file mode 100644 index 00000000..f31d1d00 --- /dev/null +++ b/docs/models/destinationcustomerio.md @@ -0,0 +1,10 @@ +# DestinationCustomerIo + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------ | ------------------------------ | ------------------------------ | ------------------------------ | +| `credentials` | [models.DestinationCustomerIoCredentials](../models/destinationcustomeriocredentials.md) | :heavy_check_mark: | Enter the site ID and API key to authenticate. | +| `destination_type` | [models.CustomerIo](../models/customerio.md) | :heavy_check_mark: | N/A | +| `object_storage_config` | [Optional[models.ObjectStorageConfiguration]](../models/objectstorageconfiguration.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinationcustomeriocredentials.md b/docs/models/destinationcustomeriocredentials.md new file mode 100644 index 00000000..3f92c22a --- /dev/null +++ b/docs/models/destinationcustomeriocredentials.md @@ -0,0 +1,12 @@ +# DestinationCustomerIoCredentials + +Enter the site ID and API key to authenticate. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------ | ------------------------------ | ------------------------------ | ------------------------------ | +| `api_key` | *str* | :heavy_check_mark: | Enter your Customer IO API Key. | +| `site_id` | *str* | :heavy_check_mark: | Enter your Customer IO Site ID.
| +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinationcustomerios3.md b/docs/models/destinationcustomerios3.md new file mode 100644 index 00000000..6a09ba0a --- /dev/null +++ b/docs/models/destinationcustomerios3.md @@ -0,0 +1,16 @@ +# DestinationCustomerIoS3 + + +## Fields + +| Field | Type | Required | Description | Example | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `bucket_path` | *str* | :heavy_check_mark: | All files in the bucket will be prefixed by this. | prefix/ | +| `s3_bucket_name` | *str* | :heavy_check_mark: | The name of the S3 bucket. Read more here. | airbyte_sync | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | +| `access_key_id` | *Optional[str]* | :heavy_minus_sign: | The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here. | A012345678910EXAMPLE | +| `role_arn` | *Optional[str]* | :heavy_minus_sign: | The ARN of the AWS role to assume. Only usable in Airbyte Cloud. | arn:aws:iam::123456789:role/ExternalIdIsYourWorkspaceId | +| `s3_bucket_region` | [Optional[models.DestinationCustomerIoS3BucketRegion]](../models/destinationcustomerios3bucketregion.md) | :heavy_minus_sign: | The region of the S3 bucket. See here for all region codes. | us-east-1 | +| `s3_endpoint` | *Optional[str]* | :heavy_minus_sign: | Your S3 endpoint url. Read more here | http://localhost:9000 | +| `secret_access_key` | *Optional[str]* | :heavy_minus_sign: | The corresponding secret to the access key ID. Read more here | a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY | +| `storage_type` | [Optional[models.DestinationCustomerIoStorageType]](../models/destinationcustomeriostoragetype.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/destinations3glues3bucketregion.md b/docs/models/destinationcustomerios3bucketregion.md similarity index 97% rename from docs/models/destinations3glues3bucketregion.md rename to docs/models/destinationcustomerios3bucketregion.md index c48b9a91..46c4f3f8 100644 --- a/docs/models/destinations3glues3bucketregion.md +++ b/docs/models/destinationcustomerios3bucketregion.md @@ -1,4 +1,4 @@ -# DestinationS3GlueS3BucketRegion +# DestinationCustomerIoS3BucketRegion The region of the S3 bucket. 
See here for all region codes. diff --git a/docs/models/destinationcustomeriostoragetype.md b/docs/models/destinationcustomeriostoragetype.md new file mode 100644 index 00000000..7cf37cd3 --- /dev/null +++ b/docs/models/destinationcustomeriostoragetype.md @@ -0,0 +1,8 @@ +# DestinationCustomerIoStorageType + + +## Values + +| Name | Value | +| ----- | ----- | +| `S3` | S3 | \ No newline at end of file diff --git a/docs/models/destinationdeepset.md b/docs/models/destinationdeepset.md new file mode 100644 index 00000000..d4c10485 --- /dev/null +++ b/docs/models/destinationdeepset.md @@ -0,0 +1,12 @@ +# DestinationDeepset + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------ | ------------------------------ | ------------------------------ | ------------------------------ | +| `api_key` | *str* | :heavy_check_mark: | Your deepset Cloud API key. | +| `workspace` | *str* | :heavy_check_mark: | Name of the workspace to which to sync the data. | +| `base_url` | *Optional[str]* | :heavy_minus_sign: | URL of the deepset Cloud API (e.g. https://api.cloud.deepset.ai, https://api.us.deepset.ai, etc.). Defaults to https://api.cloud.deepset.ai. | +| `destination_type` | [models.Deepset](../models/deepset.md) | :heavy_check_mark: | N/A | +| `retries` | *Optional[float]* | :heavy_minus_sign: | Number of times to retry an action before giving up. | \ No newline at end of file diff --git a/docs/models/destinationelasticsearch.md b/docs/models/destinationelasticsearch.md index 1993f52a..013d3af9 100644 --- a/docs/models/destinationelasticsearch.md +++ b/docs/models/destinationelasticsearch.md @@ -9,5 +9,6 @@ | `authentication_method` | [Optional[models.AuthenticationMethod]](../models/authenticationmethod.md) | :heavy_minus_sign: | The type of authentication to be used | | `ca_certificate` | *Optional[str]* | :heavy_minus_sign: | CA certificate | | `destination_type` | [models.Elasticsearch](../models/elasticsearch.md) | :heavy_check_mark: | N/A | +| `path_prefix` | *Optional[str]* | :heavy_minus_sign: | The path prefix of the Elasticsearch server | | `tunnel_method` | [Optional[models.DestinationElasticsearchSSHTunnelMethod]](../models/destinationelasticsearchsshtunnelmethod.md) | :heavy_minus_sign: | Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. | | `upsert` | *Optional[bool]* | :heavy_minus_sign: | If a primary key identifier is defined in the source, an upsert will be performed using the primary key value as the elasticsearch doc id. Does not support composite primary keys.
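Since the deepset connector above is new, a quick end-to-end sketch may help: it plugs `DestinationDeepset` into the `DestinationCreateRequest` shown earlier (the same pattern applies to the new Customer.io and HubSpot models). The client construction and bearer-token security are assumptions based on this SDK's usual generated surface; IDs and keys are placeholders:

```python
import airbyte_api
from airbyte_api import models

# Assumed client setup; substitute a real API token and workspace ID.
s = airbyte_api.AirbyteAPI(
    security=models.Security(bearer_auth="REPLACE_WITH_API_TOKEN"),
)

res = s.destinations.create_destination(
    request=models.DestinationCreateRequest(
        name="deepset-prod",
        workspace_id="00000000-0000-0000-0000-000000000000",
        configuration=models.DestinationDeepset(
            api_key="REPLACE_ME",
            workspace="airbyte-sync",              # deepset workspace to write into
            base_url="https://api.us.deepset.ai",  # omit to use the default cloud URL
        ),
    ),
)
```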
| \ No newline at end of file diff --git a/docs/models/destinationelasticsearchnone.md b/docs/models/destinationelasticsearchnone.md new file mode 100644 index 00000000..25b7bc1e --- /dev/null +++ b/docs/models/destinationelasticsearchnone.md @@ -0,0 +1,10 @@ +# DestinationElasticsearchNone + +No authentication will be used + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | +| `method` | [models.DestinationElasticsearchMethod](../models/destinationelasticsearchmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/destinationgooglesheets.md b/docs/models/destinationgooglesheets.md index b70cc864..30139784 100644 --- a/docs/models/destinationgooglesheets.md +++ b/docs/models/destinationgooglesheets.md @@ -5,6 +5,6 @@ | Field | Type | Required | Description | Example | | ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `credentials` | [models.AuthenticationViaGoogleOAuth](../models/authenticationviagoogleoauth.md) | :heavy_check_mark: | Google API Credentials for connecting to Google Sheets and Google Drive APIs | | +| `credentials` | [models.DestinationGoogleSheetsAuthentication](../models/destinationgooglesheetsauthentication.md) | :heavy_check_mark: | Authentication method to access Google Sheets | | | `spreadsheet_id` | *str* | :heavy_check_mark: | The link to your spreadsheet. See this guide for more details. 
| https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG/edit | | `destination_type` | [models.DestinationGoogleSheetsGoogleSheets](../models/destinationgooglesheetsgooglesheets.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/destinationgooglesheetsauthentication.md b/docs/models/destinationgooglesheetsauthentication.md new file mode 100644 index 00000000..c776e6ec --- /dev/null +++ b/docs/models/destinationgooglesheetsauthentication.md @@ -0,0 +1,19 @@ +# DestinationGoogleSheetsAuthentication + +Authentication method to access Google Sheets + + +## Supported Types + +### `models.AuthenticateViaGoogleOAuth` + +```python +value: models.AuthenticateViaGoogleOAuth = /* values here */ +``` + +### `models.ServiceAccountKeyAuthentication` + +```python +value: models.ServiceAccountKeyAuthentication = /* values here */ +``` + diff --git a/docs/models/destinationgooglesheetsauthtype.md b/docs/models/destinationgooglesheetsauthtype.md new file mode 100644 index 00000000..e2bbce4e --- /dev/null +++ b/docs/models/destinationgooglesheetsauthtype.md @@ -0,0 +1,8 @@ +# DestinationGoogleSheetsAuthType + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `OAUTH2_0` | oauth2.0 | \ No newline at end of file diff --git a/docs/models/destinationgooglesheetsschemasauthtype.md b/docs/models/destinationgooglesheetsschemasauthtype.md new file mode 100644 index 00000000..6e083a7f --- /dev/null +++ b/docs/models/destinationgooglesheetsschemasauthtype.md @@ -0,0 +1,8 @@ +# DestinationGoogleSheetsSchemasAuthType + + +## Values + +| Name | Value | +| --------- | --------- | +| `SERVICE` | service | \ No newline at end of file diff --git a/docs/models/destinationhubspot.md b/docs/models/destinationhubspot.md new file mode 100644 index 00000000..6e7264df --- /dev/null +++ b/docs/models/destinationhubspot.md @@ -0,0 +1,10 @@ +# DestinationHubspot + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- | +| `credentials` | [models.DestinationHubspotCredentials](../models/destinationhubspotcredentials.md) | :heavy_check_mark: | Choose how to authenticate to HubSpot. | +| `destination_type` | [models.DestinationHubspotHubspot](../models/destinationhubspothubspot.md) | :heavy_check_mark: | N/A | +| `object_storage_config` | [Optional[models.DestinationHubspotObjectStorageConfiguration]](../models/destinationhubspotobjectstorageconfiguration.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinationhubspotcredentials.md b/docs/models/destinationhubspotcredentials.md new file mode 100644 index 00000000..ab07d0c3 --- /dev/null +++ b/docs/models/destinationhubspotcredentials.md @@ -0,0 +1,13 @@ +# DestinationHubspotCredentials + +Choose how to authenticate to HubSpot. 
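The Google Sheets change above is worth a note: `credentials` now accepts the `DestinationGoogleSheetsAuthentication` union, so a service account key works alongside OAuth. The member models' field names are not shown in this diff, so the keyword arguments below are assumptions patterned on Airbyte's usual auth shapes:

```python
from airbyte_api import models

# OAuth branch of the union (field names assumed; see the member model's page).
oauth_creds = models.AuthenticateViaGoogleOAuth(
    client_id="REPLACE_ME",
    client_secret="REPLACE_ME",
    refresh_token="REPLACE_ME",
)

# Service-account branch (field name assumed).
service_creds = models.ServiceAccountKeyAuthentication(
    service_account_info='{"type": "service_account"}',  # the key file's JSON contents
)

# Either branch satisfies the union-typed field.
sheets_config = models.DestinationGoogleSheets(
    credentials=service_creds,
    spreadsheet_id="https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG/edit",
)
```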
+ + +## Supported Types + +### `models.OAuth` + +```python +value: models.OAuth = /* values here */ +``` + diff --git a/docs/models/destinationicebergstoragetype.md b/docs/models/destinationhubspothubspot.md similarity index 53% rename from docs/models/destinationicebergstoragetype.md rename to docs/models/destinationhubspothubspot.md index 5b14ef18..b8cd9092 100644 --- a/docs/models/destinationicebergstoragetype.md +++ b/docs/models/destinationhubspothubspot.md @@ -1,8 +1,8 @@ -# DestinationIcebergStorageType +# DestinationHubspotHubspot ## Values | Name | Value | | --------- | --------- | -| `MANAGED` | MANAGED | \ No newline at end of file +| `HUBSPOT` | hubspot | \ No newline at end of file diff --git a/docs/models/servermanaged.md b/docs/models/destinationhubspotnone.md similarity index 72% rename from docs/models/servermanaged.md rename to docs/models/destinationhubspotnone.md index 561b71e1..5c756bd7 100644 --- a/docs/models/servermanaged.md +++ b/docs/models/destinationhubspotnone.md @@ -1,11 +1,9 @@ -# ServerManaged - -Server-managed object storage +# DestinationHubspotNone ## Fields | Field | Type | Required | Description | | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -| `managed_warehouse_name` | *str* | :heavy_check_mark: | The name of the managed warehouse | -| `storage_type` | [Optional[models.DestinationIcebergStorageType]](../models/destinationicebergstoragetype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `storage_type` | [Optional[models.DestinationHubspotStorageType]](../models/destinationhubspotstoragetype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinationhubspotobjectstorageconfiguration.md b/docs/models/destinationhubspotobjectstorageconfiguration.md new file mode 100644 index 00000000..c89c576c --- /dev/null +++ b/docs/models/destinationhubspotobjectstorageconfiguration.md @@ -0,0 +1,17 @@ +# DestinationHubspotObjectStorageConfiguration + + +## Supported Types + +### `models.DestinationHubspotNone` + +```python +value: models.DestinationHubspotNone = /* values here */ +``` + +### `models.DestinationHubspotS3` + +```python +value: models.DestinationHubspotS3 = /* values here */ +``` + diff --git a/docs/models/destinationicebergs3.md b/docs/models/destinationhubspots3.md similarity index 76% rename from docs/models/destinationicebergs3.md rename to docs/models/destinationhubspots3.md index 9cfbb886..99e36abd 100644 --- a/docs/models/destinationicebergs3.md +++ b/docs/models/destinationhubspots3.md @@ -1,16 +1,16 @@ -# DestinationIcebergS3 - -S3 object storage +# DestinationHubspotS3 ## Fields | Field | Type | Required | Description | Example | | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `access_key_id` | *str* | :heavy_check_mark: | The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here. | A012345678910EXAMPLE | -| `s3_warehouse_uri` | *str* | :heavy_check_mark: | The Warehouse Uri for Iceberg | s3a://my-bucket/path/to/warehouse | -| `secret_access_key` | *str* | :heavy_check_mark: | The corresponding secret to the access key ID. Read more here | a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY | -| `s3_bucket_region` | [Optional[models.DestinationIcebergS3BucketRegion]](../models/destinationicebergs3bucketregion.md) | :heavy_minus_sign: | The region of the S3 bucket. See here for all region codes. | | +| `bucket_path` | *str* | :heavy_check_mark: | All files in the bucket will be prefixed by this. | prefix/ | +| `s3_bucket_name` | *str* | :heavy_check_mark: | The name of the S3 bucket. Read more here. | airbyte_sync | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | +| `access_key_id` | *Optional[str]* | :heavy_minus_sign: | The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here. | A012345678910EXAMPLE | +| `role_arn` | *Optional[str]* | :heavy_minus_sign: | The ARN of the AWS role to assume. Only usable in Airbyte Cloud. | arn:aws:iam::123456789:role/ExternalIdIsYourWorkspaceId | +| `s3_bucket_region` | [Optional[models.DestinationHubspotS3BucketRegion]](../models/destinationhubspots3bucketregion.md) | :heavy_minus_sign: | The region of the S3 bucket. See here for all region codes. | us-east-1 | | `s3_endpoint` | *Optional[str]* | :heavy_minus_sign: | Your S3 endpoint url. Read more here | http://localhost:9000 | -| `s3_path_style_access` | *Optional[bool]* | :heavy_minus_sign: | Use path style access | true | -| `storage_type` | [Optional[models.StorageType]](../models/storagetype.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file +| `secret_access_key` | *Optional[str]* | :heavy_minus_sign: | The corresponding secret to the access key ID. 
Read more here | a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY | +| `storage_type` | [Optional[models.DestinationHubspotSchemasStorageType]](../models/destinationhubspotschemasstoragetype.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/destinationicebergs3bucketregion.md b/docs/models/destinationhubspots3bucketregion.md similarity index 97% rename from docs/models/destinationicebergs3bucketregion.md rename to docs/models/destinationhubspots3bucketregion.md index 98079282..7c6a9cf5 100644 --- a/docs/models/destinationicebergs3bucketregion.md +++ b/docs/models/destinationhubspots3bucketregion.md @@ -1,4 +1,4 @@ -# DestinationIcebergS3BucketRegion +# DestinationHubspotS3BucketRegion The region of the S3 bucket. See here for all region codes. diff --git a/docs/models/destinationhubspotschemasstoragetype.md b/docs/models/destinationhubspotschemasstoragetype.md new file mode 100644 index 00000000..1fa46bb2 --- /dev/null +++ b/docs/models/destinationhubspotschemasstoragetype.md @@ -0,0 +1,8 @@ +# DestinationHubspotSchemasStorageType + + +## Values + +| Name | Value | +| ----- | ----- | +| `S3` | S3 | \ No newline at end of file diff --git a/docs/models/destinationhubspotstoragetype.md b/docs/models/destinationhubspotstoragetype.md new file mode 100644 index 00000000..d4851ac3 --- /dev/null +++ b/docs/models/destinationhubspotstoragetype.md @@ -0,0 +1,8 @@ +# DestinationHubspotStorageType + + +## Values + +| Name | Value | +| ------ | ------ | +| `NONE` | None | \ No newline at end of file diff --git a/docs/models/destinationiceberg.md b/docs/models/destinationiceberg.md deleted file mode 100644 index 6fcadaa3..00000000 --- a/docs/models/destinationiceberg.md +++ /dev/null @@ -1,11 +0,0 @@ -# DestinationIceberg - - -## Fields - -| Field | Type | Required | Description | -| ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | -| `catalog_config` | [models.IcebergCatalogConfig](../models/icebergcatalogconfig.md) | :heavy_check_mark: | Catalog config of Iceberg. | -| `format_config` | [models.FileFormat](../models/fileformat.md) | :heavy_check_mark: | File format of Iceberg storage. | -| `storage_config` | [models.StorageConfig](../models/storageconfig.md) | :heavy_check_mark: | Storage config of Iceberg. 
| -| `destination_type` | [models.Iceberg](../models/iceberg.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/destinationicebergschemascatalogconfigcatalogtype.md b/docs/models/destinationicebergschemascatalogconfigcatalogtype.md deleted file mode 100644 index d80e87da..00000000 --- a/docs/models/destinationicebergschemascatalogconfigcatalogtype.md +++ /dev/null @@ -1,8 +0,0 @@ -# DestinationIcebergSchemasCatalogConfigCatalogType - - -## Values - -| Name | Value | -| ------ | ------ | -| `REST` | Rest | \ No newline at end of file diff --git a/docs/models/destinationicebergschemascatalogconfigicebergcatalogconfigcatalogtype.md b/docs/models/destinationicebergschemascatalogconfigicebergcatalogconfigcatalogtype.md deleted file mode 100644 index 147f35b6..00000000 --- a/docs/models/destinationicebergschemascatalogconfigicebergcatalogconfigcatalogtype.md +++ /dev/null @@ -1,8 +0,0 @@ -# DestinationIcebergSchemasCatalogConfigIcebergCatalogConfigCatalogType - - -## Values - -| Name | Value | -| ------ | ------ | -| `GLUE` | Glue | \ No newline at end of file diff --git a/docs/models/destinationicebergschemascatalogtype.md b/docs/models/destinationicebergschemascatalogtype.md deleted file mode 100644 index 180428ea..00000000 --- a/docs/models/destinationicebergschemascatalogtype.md +++ /dev/null @@ -1,8 +0,0 @@ -# DestinationIcebergSchemasCatalogType - - -## Values - -| Name | Value | -| ------ | ------ | -| `JDBC` | Jdbc | \ No newline at end of file diff --git a/docs/models/destinationmssql.md b/docs/models/destinationmssql.md index c97cb2bd..e506bd90 100644 --- a/docs/models/destinationmssql.md +++ b/docs/models/destinationmssql.md @@ -7,12 +7,12 @@ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | | `database` | *str* | :heavy_check_mark: | The name of the MSSQL database. | | | `host` | *str* | :heavy_check_mark: | The host name of the MSSQL database. | | -| `username` | *str* | :heavy_check_mark: | The username which is used to access the database. | | +| `load_type` | [models.LoadType](../models/loadtype.md) | :heavy_check_mark: | Specifies the type of load mechanism (e.g., BULK, INSERT) and its associated configuration. | | +| `port` | *int* | :heavy_check_mark: | The port of the MSSQL database. | 1433 | +| `ssl_method` | [models.SSLMethod](../models/sslmethod.md) | :heavy_check_mark: | The encryption method which is used to communicate with the database. | | +| `user` | *str* | :heavy_check_mark: | The username which is used to access the database. 
| | | `destination_type` | [models.Mssql](../models/mssql.md) | :heavy_check_mark: | N/A | | | `jdbc_url_params` | *Optional[str]* | :heavy_minus_sign: | Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). | | | `password` | *Optional[str]* | :heavy_minus_sign: | The password associated with this username. | | -| `port` | *Optional[int]* | :heavy_minus_sign: | The port of the MSSQL database. | 1433 | -| `raw_data_schema` | *Optional[str]* | :heavy_minus_sign: | The schema to write raw tables into (default: airbyte_internal) | | | `schema` | *Optional[str]* | :heavy_minus_sign: | The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public". | public | -| `ssl_method` | [Optional[models.SSLMethod]](../models/sslmethod.md) | :heavy_minus_sign: | The encryption method which is used to communicate with the database. | | | `tunnel_method` | [Optional[models.DestinationMssqlSSHTunnelMethod]](../models/destinationmssqlsshtunnelmethod.md) | :heavy_minus_sign: | Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. | | \ No newline at end of file diff --git a/docs/models/destinationmssqlloadtype.md b/docs/models/destinationmssqlloadtype.md new file mode 100644 index 00000000..cdeaa137 --- /dev/null +++ b/docs/models/destinationmssqlloadtype.md @@ -0,0 +1,8 @@ +# DestinationMssqlLoadType + + +## Values + +| Name | Value | +| ------ | ------ | +| `BULK` | BULK | \ No newline at end of file diff --git a/docs/models/destinationmssqlschemassslmethod.md b/docs/models/destinationmssqlname.md similarity index 87% rename from docs/models/destinationmssqlschemassslmethod.md rename to docs/models/destinationmssqlname.md index d28b0cb1..db83418d 100644 --- a/docs/models/destinationmssqlschemassslmethod.md +++ b/docs/models/destinationmssqlname.md @@ -1,4 +1,4 @@ -# DestinationMssqlSchemasSslMethod +# DestinationMssqlName ## Values diff --git a/docs/models/destinationmssqlnotunnel.md b/docs/models/destinationmssqlnotunnel.md index 372c5951..e5069c5e 100644 --- a/docs/models/destinationmssqlnotunnel.md +++ b/docs/models/destinationmssqlnotunnel.md @@ -1,8 +1,11 @@ # DestinationMssqlNoTunnel +No ssh tunnel needed to connect to database + ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -| `tunnel_method` | [models.DestinationMssqlTunnelMethod](../models/destinationmssqltunnelmethod.md) | :heavy_check_mark: | No ssh tunnel needed to connect to database | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `tunnel_method` | 
[Optional[models.DestinationMssqlTunnelMethod]](../models/destinationmssqltunnelmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinationmssqlpasswordauthentication.md b/docs/models/destinationmssqlpasswordauthentication.md index 512cfd95..c468ae5d 100644 --- a/docs/models/destinationmssqlpasswordauthentication.md +++ b/docs/models/destinationmssqlpasswordauthentication.md @@ -1,12 +1,15 @@ # DestinationMssqlPasswordAuthentication +Connect through a jump server tunnel host using username and password authentication + ## Fields -| Field | Type | Required | Description | Example | -| ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | -| `tunnel_host` | *str* | :heavy_check_mark: | Hostname of the jump server host that allows inbound ssh tunnel. | | -| `tunnel_user` | *str* | :heavy_check_mark: | OS-level username for logging into the jump server host | | -| `tunnel_user_password` | *str* | :heavy_check_mark: | OS-level password for logging into the jump server host | | -| `tunnel_method` | [models.DestinationMssqlSchemasTunnelMethodTunnelMethod](../models/destinationmssqlschemastunnelmethodtunnelmethod.md) | :heavy_check_mark: | Connect through a jump server tunnel host using username and password authentication | | -| `tunnel_port` | *Optional[int]* | :heavy_minus_sign: | Port on the proxy/jump server that accepts inbound ssh connections. | 22 | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------- | +| `tunnel_host` | *str* | :heavy_check_mark: | Hostname of the jump server host that allows inbound ssh tunnel. | +| `tunnel_user` | *str* | :heavy_check_mark: | OS-level username for logging into the jump server host | +| `tunnel_user_password` | *str* | :heavy_check_mark: | OS-level password for logging into the jump server host | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `tunnel_method` | [Optional[models.DestinationMssqlSchemasTunnelMethodTunnelMethod]](../models/destinationmssqlschemastunnelmethodtunnelmethod.md) | :heavy_minus_sign: | N/A | +| `tunnel_port` | *Optional[int]* | :heavy_minus_sign: | Port on the proxy/jump server that accepts inbound ssh connections. 
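The tunnel models above also change shape: `tunnel_method` drops to optional (its discriminator is now generated with a default) and the models accept `additional_properties`. A sketch of the password-authentication variant using only the fields from the table, with placeholder host and credentials:

```python
from airbyte_api import models

# Jump-server tunnel with username/password auth; the tunnel_method
# discriminator has a generated default, so it can be omitted.
tunnel = models.DestinationMssqlPasswordAuthentication(
    tunnel_host="bastion.example.com",
    tunnel_user="airbyte",
    tunnel_user_password="REPLACE_ME",
    tunnel_port=22,  # default ssh port
)
```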
| \ No newline at end of file diff --git a/docs/models/destinationmssqlschemasloadtype.md b/docs/models/destinationmssqlschemasloadtype.md new file mode 100644 index 00000000..732a77da --- /dev/null +++ b/docs/models/destinationmssqlschemasloadtype.md @@ -0,0 +1,8 @@ +# DestinationMssqlSchemasLoadType + + +## Values + +| Name | Value | +| -------- | -------- | +| `INSERT` | INSERT | \ No newline at end of file diff --git a/docs/models/destinationmssqlschemassslmethodsslmethod.md b/docs/models/destinationmssqlschemasname.md similarity index 83% rename from docs/models/destinationmssqlschemassslmethodsslmethod.md rename to docs/models/destinationmssqlschemasname.md index 35e48db6..38b19785 100644 --- a/docs/models/destinationmssqlschemassslmethodsslmethod.md +++ b/docs/models/destinationmssqlschemasname.md @@ -1,4 +1,4 @@ -# DestinationMssqlSchemasSslMethodSslMethod +# DestinationMssqlSchemasName ## Values diff --git a/docs/models/destinationmssqlschemastunnelmethod.md b/docs/models/destinationmssqlschemastunnelmethod.md index 0712c116..91ba6a48 100644 --- a/docs/models/destinationmssqlschemastunnelmethod.md +++ b/docs/models/destinationmssqlschemastunnelmethod.md @@ -1,7 +1,5 @@ # DestinationMssqlSchemasTunnelMethod -Connect through a jump server tunnel host using username and ssh key - ## Values diff --git a/docs/models/destinationmssqlschemastunnelmethodtunnelmethod.md b/docs/models/destinationmssqlschemastunnelmethodtunnelmethod.md index 83b8b625..dd057079 100644 --- a/docs/models/destinationmssqlschemastunnelmethodtunnelmethod.md +++ b/docs/models/destinationmssqlschemastunnelmethodtunnelmethod.md @@ -1,7 +1,5 @@ # DestinationMssqlSchemasTunnelMethodTunnelMethod -Connect through a jump server tunnel host using username and password authentication - ## Values diff --git a/docs/models/destinationmssqlsshkeyauthentication.md b/docs/models/destinationmssqlsshkeyauthentication.md index 46863b26..2e90564f 100644 --- a/docs/models/destinationmssqlsshkeyauthentication.md +++ b/docs/models/destinationmssqlsshkeyauthentication.md @@ -1,12 +1,15 @@ # DestinationMssqlSSHKeyAuthentication +Connect through a jump server tunnel host using username and ssh key + ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | -| `ssh_key` | *str* | :heavy_check_mark: | OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa ) | | -| `tunnel_host` | *str* | :heavy_check_mark: | Hostname of the jump server host that allows inbound ssh tunnel. | | -| `tunnel_user` | *str* | :heavy_check_mark: | OS-level username for logging into the jump server host. 
| | -| `tunnel_method` | [models.DestinationMssqlSchemasTunnelMethod](../models/destinationmssqlschemastunnelmethod.md) | :heavy_check_mark: | Connect through a jump server tunnel host using username and ssh key | | -| `tunnel_port` | *Optional[int]* | :heavy_minus_sign: | Port on the proxy/jump server that accepts inbound ssh connections. | 22 | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | +| `ssh_key` | *str* | :heavy_check_mark: | OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa ) | +| `tunnel_host` | *str* | :heavy_check_mark: | Hostname of the jump server host that allows inbound ssh tunnel. | +| `tunnel_user` | *str* | :heavy_check_mark: | OS-level username for logging into the jump server host | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `tunnel_method` | [Optional[models.DestinationMssqlSchemasTunnelMethod]](../models/destinationmssqlschemastunnelmethod.md) | :heavy_minus_sign: | N/A | +| `tunnel_port` | *Optional[int]* | :heavy_minus_sign: | Port on the proxy/jump server that accepts inbound ssh connections. | \ No newline at end of file diff --git a/docs/models/destinationmssqltunnelmethod.md b/docs/models/destinationmssqltunnelmethod.md index 5b9af935..8ec91501 100644 --- a/docs/models/destinationmssqltunnelmethod.md +++ b/docs/models/destinationmssqltunnelmethod.md @@ -1,7 +1,5 @@ # DestinationMssqlTunnelMethod -No ssh tunnel needed to connect to database - ## Values diff --git a/docs/models/destinationmssqlv2.md b/docs/models/destinationmssqlv2.md new file mode 100644 index 00000000..d7a04665 --- /dev/null +++ b/docs/models/destinationmssqlv2.md @@ -0,0 +1,17 @@ +# DestinationMssqlV2 + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `database` | *str* | :heavy_check_mark: | The name of the MSSQL database. | | +| `host` | *str* | :heavy_check_mark: | The host name of the MSSQL database. 
| | +| `load_type` | [models.DestinationMssqlV2LoadType](../models/destinationmssqlv2loadtype.md) | :heavy_check_mark: | Specifies the type of load mechanism (e.g., BULK, INSERT) and its associated configuration. | | +| `port` | *int* | :heavy_check_mark: | The port of the MSSQL database. | 1433 | +| `ssl_method` | [models.DestinationMssqlV2SSLMethod](../models/destinationmssqlv2sslmethod.md) | :heavy_check_mark: | The encryption method which is used to communicate with the database. | | +| `user` | *str* | :heavy_check_mark: | The username which is used to access the database. | | +| `destination_type` | [models.MssqlV2](../models/mssqlv2.md) | :heavy_check_mark: | N/A | | +| `jdbc_url_params` | *Optional[str]* | :heavy_minus_sign: | Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). | | +| `password` | *Optional[str]* | :heavy_minus_sign: | The password associated with this username. | | +| `schema` | *Optional[str]* | :heavy_minus_sign: | The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public". | public | \ No newline at end of file diff --git a/docs/models/destinationmssqlv2bulkload.md b/docs/models/destinationmssqlv2bulkload.md new file mode 100644 index 00000000..2bb1516f --- /dev/null +++ b/docs/models/destinationmssqlv2bulkload.md @@ -0,0 +1,16 @@ +# DestinationMssqlV2BulkLoad + +Configuration details for using the BULK loading mechanism. + + +## Fields + +| Field | Type | Required | Description | Example | +| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `azure_blob_storage_account_name` | *str* | :heavy_check_mark: | The name of the Azure Blob Storage account. See: https://learn.microsoft.com/azure/storage/blobs/storage-blobs-introduction#storage-accounts | mystorageaccount | +| `azure_blob_storage_container_name` | *str* | :heavy_check_mark: | The name of the Azure Blob Storage container. See: https://learn.microsoft.com/azure/storage/blobs/storage-blobs-introduction#containers | mycontainer | +| `bulk_load_data_source` | *str* | :heavy_check_mark: | Specifies the external data source name configured in MSSQL, which references the Azure Blob container. See: https://learn.microsoft.com/sql/t-sql/statements/bulk-insert-transact-sql | MyAzureBlobStorage | +| `shared_access_signature` | *str* | :heavy_check_mark: | A shared access signature (SAS) provides secure delegated access to resources in your storage account. 
See: https://learn.microsoft.com/azure/storage/common/storage-sas-overview | a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | +| `bulk_load_validate_values_pre_load` | *Optional[bool]* | :heavy_minus_sign: | When enabled, Airbyte will validate all values before loading them into the destination table. This provides stronger data integrity guarantees but may significantly impact performance. | false | +| `load_type` | [Optional[models.DestinationMssqlV2SchemasLoadType]](../models/destinationmssqlv2schemasloadtype.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/destinationmssqlv2encryptedtrustservercertificate.md b/docs/models/destinationmssqlv2encryptedtrustservercertificate.md new file mode 100644 index 00000000..49016f9e --- /dev/null +++ b/docs/models/destinationmssqlv2encryptedtrustservercertificate.md @@ -0,0 +1,11 @@ +# DestinationMssqlV2EncryptedTrustServerCertificate + +Use the certificate provided by the server without verification. (For testing purposes only!) + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `name` | [Optional[models.DestinationMssqlV2SchemasName]](../models/destinationmssqlv2schemasname.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinationmssqlv2encryptedverifycertificate.md b/docs/models/destinationmssqlv2encryptedverifycertificate.md new file mode 100644 index 00000000..7063ac0b --- /dev/null +++ b/docs/models/destinationmssqlv2encryptedverifycertificate.md @@ -0,0 +1,14 @@ +# DestinationMssqlV2EncryptedVerifyCertificate + +Verify and use the certificate provided by the server. + + +## Fields + +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `host_name_in_certificate` | *Optional[str]* | :heavy_minus_sign: | Specifies the host name of the server. The value of this property must match the subject property of the certificate. | +| `name` | [Optional[models.DestinationMssqlV2SchemasSslMethodName]](../models/destinationmssqlv2schemassslmethodname.md) | :heavy_minus_sign: | N/A | +| `trust_store_name` | *Optional[str]* | :heavy_minus_sign: | Specifies the name of the trust store. | +| `trust_store_password` | *Optional[str]* | :heavy_minus_sign: | Specifies the password of the trust store. 
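Pulling the MSSQL V2 pieces together: `load_type` and `ssl_method` are required unions, `port` is required, and the username field is `user` rather than `username`. A sketch that picks the INSERT load branch and the unencrypted SSL branch (both have only optional fields), with placeholder connection details; as elsewhere, `destination_type` is assumed to be a generated constant:

```python
from airbyte_api import models

mssql_v2_config = models.DestinationMssqlV2(
    host="mssql.internal.example.com",  # placeholder host
    port=1433,
    database="warehouse",
    user="airbyte_user",                # note: `user`, not `username`
    password="REPLACE_ME",
    # INSERT branch of the load-type union; the BULK branch instead takes the
    # Azure Blob Storage fields shown in DestinationMssqlV2BulkLoad.
    load_type=models.DestinationMssqlV2InsertLoad(),
    # Testing only; prefer one of the encrypted variants in production.
    ssl_method=models.DestinationMssqlV2Unencrypted(),
)
```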
| \ No newline at end of file diff --git a/docs/models/destinationmssqlv2insertload.md b/docs/models/destinationmssqlv2insertload.md new file mode 100644 index 00000000..1891fcc8 --- /dev/null +++ b/docs/models/destinationmssqlv2insertload.md @@ -0,0 +1,11 @@ +# DestinationMssqlV2InsertLoad + +Configuration details for using the INSERT loading mechanism. + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `load_type` | [Optional[models.DestinationMssqlV2SchemasLoadTypeLoadType]](../models/destinationmssqlv2schemasloadtypeloadtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinationmssqlv2loadtype.md b/docs/models/destinationmssqlv2loadtype.md new file mode 100644 index 00000000..1911cb34 --- /dev/null +++ b/docs/models/destinationmssqlv2loadtype.md @@ -0,0 +1,19 @@ +# DestinationMssqlV2LoadType + +Specifies the type of load mechanism (e.g., BULK, INSERT) and its associated configuration. + + +## Supported Types + +### `models.DestinationMssqlV2InsertLoad` + +```python +value: models.DestinationMssqlV2InsertLoad = /* values here */ +``` + +### `models.DestinationMssqlV2BulkLoad` + +```python +value: models.DestinationMssqlV2BulkLoad = /* values here */ +``` + diff --git a/docs/models/destinationmssqlsslmethod.md b/docs/models/destinationmssqlv2name.md similarity index 80% rename from docs/models/destinationmssqlsslmethod.md rename to docs/models/destinationmssqlv2name.md index 9d6bfded..b6726323 100644 --- a/docs/models/destinationmssqlsslmethod.md +++ b/docs/models/destinationmssqlv2name.md @@ -1,4 +1,4 @@ -# DestinationMssqlSslMethod +# DestinationMssqlV2Name ## Values diff --git a/docs/models/destinationmssqlv2schemasloadtype.md b/docs/models/destinationmssqlv2schemasloadtype.md new file mode 100644 index 00000000..50a34cf7 --- /dev/null +++ b/docs/models/destinationmssqlv2schemasloadtype.md @@ -0,0 +1,8 @@ +# DestinationMssqlV2SchemasLoadType + + +## Values + +| Name | Value | +| ------ | ------ | +| `BULK` | BULK | \ No newline at end of file diff --git a/docs/models/destinationmssqlv2schemasloadtypeloadtype.md b/docs/models/destinationmssqlv2schemasloadtypeloadtype.md new file mode 100644 index 00000000..85fa1e34 --- /dev/null +++ b/docs/models/destinationmssqlv2schemasloadtypeloadtype.md @@ -0,0 +1,8 @@ +# DestinationMssqlV2SchemasLoadTypeLoadType + + +## Values + +| Name | Value | +| -------- | -------- | +| `INSERT` | INSERT | \ No newline at end of file diff --git a/docs/models/destinationmssqlv2schemasname.md b/docs/models/destinationmssqlv2schemasname.md new file mode 100644 index 00000000..50dec753 --- /dev/null +++ b/docs/models/destinationmssqlv2schemasname.md @@ -0,0 +1,8 @@ +# DestinationMssqlV2SchemasName + + +## Values + +| Name | Value | +| ------------------------------------ | ------------------------------------ | +| `ENCRYPTED_TRUST_SERVER_CERTIFICATE` | encrypted_trust_server_certificate | \ No newline at end of file diff --git 
a/docs/models/destinationmssqlv2schemassslmethodname.md b/docs/models/destinationmssqlv2schemassslmethodname.md new file mode 100644 index 00000000..ca15c519 --- /dev/null +++ b/docs/models/destinationmssqlv2schemassslmethodname.md @@ -0,0 +1,8 @@ +# DestinationMssqlV2SchemasSslMethodName + + +## Values + +| Name | Value | +| ------------------------------ | ------------------------------ | +| `ENCRYPTED_VERIFY_CERTIFICATE` | encrypted_verify_certificate | \ No newline at end of file diff --git a/docs/models/destinationmssqlv2sslmethod.md b/docs/models/destinationmssqlv2sslmethod.md new file mode 100644 index 00000000..ac1d06aa --- /dev/null +++ b/docs/models/destinationmssqlv2sslmethod.md @@ -0,0 +1,25 @@ +# DestinationMssqlV2SSLMethod + +The encryption method which is used to communicate with the database. + + +## Supported Types + +### `models.DestinationMssqlV2Unencrypted` + +```python +value: models.DestinationMssqlV2Unencrypted = /* values here */ +``` + +### `models.DestinationMssqlV2EncryptedTrustServerCertificate` + +```python +value: models.DestinationMssqlV2EncryptedTrustServerCertificate = /* values here */ +``` + +### `models.DestinationMssqlV2EncryptedVerifyCertificate` + +```python +value: models.DestinationMssqlV2EncryptedVerifyCertificate = /* values here */ +``` + diff --git a/docs/models/destinationmssqlv2unencrypted.md b/docs/models/destinationmssqlv2unencrypted.md new file mode 100644 index 00000000..fbf712a9 --- /dev/null +++ b/docs/models/destinationmssqlv2unencrypted.md @@ -0,0 +1,11 @@ +# DestinationMssqlV2Unencrypted + +The data transfer will not be encrypted. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `name` | [Optional[models.DestinationMssqlV2Name]](../models/destinationmssqlv2name.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinationoracle.md b/docs/models/destinationoracle.md index c4f09357..1f8cd680 100644 --- a/docs/models/destinationoracle.md +++ b/docs/models/destinationoracle.md @@ -9,7 +9,7 @@ | `sid` | *str* | :heavy_check_mark: | The System Identifier uniquely distinguishes the instance from any other instance on the same computer. | | | `username` | *str* | :heavy_check_mark: | The username to access the database. This user must have CREATE USER privileges in the database. | | | `destination_type` | [models.Oracle](../models/oracle.md) | :heavy_check_mark: | N/A | | -| `encryption` | [Optional[models.Encryption]](../models/encryption.md) | :heavy_minus_sign: | The encryption method which is used when communicating with the database. | | +| `encryption` | [Optional[models.DestinationOracleEncryption]](../models/destinationoracleencryption.md) | :heavy_minus_sign: | The encryption method which is used when communicating with the database. | | | `jdbc_url_params` | *Optional[str]* | :heavy_minus_sign: | Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). | | | `password` | *Optional[str]* | :heavy_minus_sign: | The password associated with the username. 
| | | `port` | *Optional[int]* | :heavy_minus_sign: | The port of the database. | 1521 | diff --git a/docs/models/destinationoracleencryption.md b/docs/models/destinationoracleencryption.md new file mode 100644 index 00000000..f3371f53 --- /dev/null +++ b/docs/models/destinationoracleencryption.md @@ -0,0 +1,25 @@ +# DestinationOracleEncryption + +The encryption method which is used when communicating with the database. + + +## Supported Types + +### `models.DestinationOracleUnencrypted` + +```python +value: models.DestinationOracleUnencrypted = /* values here */ +``` + +### `models.NativeNetworkEncryptionNNE` + +```python +value: models.NativeNetworkEncryptionNNE = /* values here */ +``` + +### `models.TLSEncryptedVerifyCertificate` + +```python +value: models.TLSEncryptedVerifyCertificate = /* values here */ +``` + diff --git a/docs/models/destinationpatchrequest.md b/docs/models/destinationpatchrequest.md index 4d1cee9e..29b1bb2a 100644 --- a/docs/models/destinationpatchrequest.md +++ b/docs/models/destinationpatchrequest.md @@ -3,7 +3,8 @@ ## Fields -| Field | Type | Required | Description | Example | -| ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | -| `configuration` | [Optional[models.DestinationConfiguration]](../models/destinationconfiguration.md) | :heavy_minus_sign: | The values required to configure the destination. | {
"user": "charles"
} | -| `name` | *Optional[str]* | :heavy_minus_sign: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `configuration` | [Optional[models.DestinationConfiguration]](../models/destinationconfiguration.md) | :heavy_minus_sign: | The values required to configure the destination. | {
"user": "charles"
} | +| `name` | *Optional[str]* | :heavy_minus_sign: | N/A | | +| `resource_allocation` | [Optional[models.ScopedResourceRequirements]](../models/scopedresourcerequirements.md) | :heavy_minus_sign: | Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level. | | \ No newline at end of file diff --git a/docs/models/destinationpostgres.md b/docs/models/destinationpostgres.md index 4ef23a97..69743b9d 100644 --- a/docs/models/destinationpostgres.md +++ b/docs/models/destinationpostgres.md @@ -18,4 +18,5 @@ | `schema` | *Optional[str]* | :heavy_minus_sign: | The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public". | public | | `ssl` | *Optional[bool]* | :heavy_minus_sign: | Encrypt data using SSL. When activating SSL, please select one of the connection modes. | | | `ssl_mode` | [Optional[models.SSLModes]](../models/sslmodes.md) | :heavy_minus_sign: | SSL connection modes.
disable - Choose this mode to disable encryption of communication between Airbyte and the destination database
allow - Choose this mode to enable encryption only when required by the source database
prefer - Choose this mode to allow an unencrypted connection only if the source database does not support encryption
require - Choose this mode to always require encryption. If the source database server does not support encryption, the connection will fail
verify-ca - Choose this mode to always require encryption and to verify that the source database server has a valid SSL certificate
verify-full - This is the most secure mode. Choose this mode to always require encryption and to verify the identity of the source database server
See more information in the docs. | | -| `tunnel_method` | [Optional[models.DestinationPostgresSSHTunnelMethod]](../models/destinationpostgressshtunnelmethod.md) | :heavy_minus_sign: | Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. | | \ No newline at end of file +| `tunnel_method` | [Optional[models.DestinationPostgresSSHTunnelMethod]](../models/destinationpostgressshtunnelmethod.md) | :heavy_minus_sign: | Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. | | +| `unconstrained_number` | *Optional[bool]* | :heavy_minus_sign: | Create numeric columns as unconstrained DECIMAL instead of NUMBER(38, 9). This will allow increased precision in numeric values. (Disabled by default for backwards compatibility, but enabling it is recommended.) | | \ No newline at end of file diff --git a/docs/models/destinationpostgresmode.md b/docs/models/destinationpostgresmode.md index 06ba06b1..a404aea2 100644 --- a/docs/models/destinationpostgresmode.md +++ b/docs/models/destinationpostgresmode.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ------- | ------- | -| `ALLOW` | allow | \ No newline at end of file +| Name | Value | +| --------- | --------- | +| `DISABLE` | disable | \ No newline at end of file diff --git a/docs/models/destinationpostgresschemasmode.md b/docs/models/destinationpostgresschemasmode.md index 53244bed..4418e917 100644 --- a/docs/models/destinationpostgresschemasmode.md +++ b/docs/models/destinationpostgresschemasmode.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| -------- | -------- | -| `PREFER` | prefer | \ No newline at end of file +| Name | Value | +| ------- | ------- | +| `ALLOW` | allow | \ No newline at end of file diff --git a/docs/models/destinationpostgresschemassslmodemode.md b/docs/models/destinationpostgresschemassslmodemode.md index 8b1d66e6..c1914d10 100644 --- a/docs/models/destinationpostgresschemassslmodemode.md +++ b/docs/models/destinationpostgresschemassslmodemode.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| --------- | --------- | -| `REQUIRE` | require | \ No newline at end of file +| Name | Value | +| -------- | -------- | +| `PREFER` | prefer | \ No newline at end of file diff --git a/docs/models/destinationpostgresschemassslmodesslmodes1mode.md b/docs/models/destinationpostgresschemassslmodesslmodes1mode.md deleted file mode 100644 index 7dbda370..00000000 --- a/docs/models/destinationpostgresschemassslmodesslmodes1mode.md +++ /dev/null @@ -1,8 +0,0 @@ -# DestinationPostgresSchemasSSLModeSSLModes1Mode - - -## Values - -| Name | Value | -| --------- | --------- | -| `DISABLE` | disable | \ No newline at end of file diff --git a/docs/models/destinationpostgresschemassslmodesslmodes5mode.md b/docs/models/destinationpostgresschemassslmodesslmodes5mode.md new file mode 100644 index 00000000..72134089 --- /dev/null +++ b/docs/models/destinationpostgresschemassslmodesslmodes5mode.md @@ -0,0 +1,8 @@ +# DestinationPostgresSchemasSSLModeSSLModes5Mode + + +## Values + +| Name | Value | +| ----------- | ----------- | +| `VERIFY_CA` | verify-ca | \ No newline at end of file diff --git a/docs/models/destinationpostgresschemassslmodesslmodesmode.md b/docs/models/destinationpostgresschemassslmodesslmodesmode.md index 90965225..60f1a059 100644 --- a/docs/models/destinationpostgresschemassslmodesslmodesmode.md +++ b/docs/models/destinationpostgresschemassslmodesslmodesmode.md @@ -3,6 +3,6 @@ ## Values -| Name | Value |
----------- | ----------- | -| `VERIFY_CA` | verify-ca | \ No newline at end of file +| Name | Value | +| --------- | --------- | +| `REQUIRE` | require | \ No newline at end of file diff --git a/docs/models/destinationputrequest.md b/docs/models/destinationputrequest.md index 7b57324e..bcfa5530 100644 --- a/docs/models/destinationputrequest.md +++ b/docs/models/destinationputrequest.md @@ -3,7 +3,8 @@ ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | -| `configuration` | [models.DestinationConfiguration](../models/destinationconfiguration.md) | :heavy_check_mark: | The values required to configure the destination. | {
"user": "charles"
} | -| `name` | *str* | :heavy_check_mark: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `configuration` | [models.DestinationConfiguration](../models/destinationconfiguration.md) | :heavy_check_mark: | The values required to configure the destination. | {
"user": "charles"
} | +| `name` | *str* | :heavy_check_mark: | N/A | | +| `resource_allocation` | [Optional[models.ScopedResourceRequirements]](../models/scopedresourcerequirements.md) | :heavy_minus_sign: | Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level. | | \ No newline at end of file diff --git a/docs/models/destinationresponse.md b/docs/models/destinationresponse.md index b4453cbb..5e5ee7f2 100644 --- a/docs/models/destinationresponse.md +++ b/docs/models/destinationresponse.md @@ -5,12 +5,13 @@ Provides details of a single destination. ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | -| `configuration` | [models.DestinationConfiguration](../models/destinationconfiguration.md) | :heavy_check_mark: | The values required to configure the destination. | {
"user": "charles"
} | -| `created_at` | *int* | :heavy_check_mark: | N/A | | -| `definition_id` | *str* | :heavy_check_mark: | N/A | | -| `destination_id` | *str* | :heavy_check_mark: | N/A | | -| `destination_type` | *str* | :heavy_check_mark: | N/A | | -| `name` | *str* | :heavy_check_mark: | N/A | | -| `workspace_id` | *str* | :heavy_check_mark: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `configuration` | [models.DestinationConfiguration](../models/destinationconfiguration.md) | :heavy_check_mark: | The values required to configure the destination. | {
"user": "charles"
} | +| `created_at` | *int* | :heavy_check_mark: | N/A | | +| `definition_id` | *str* | :heavy_check_mark: | N/A | | +| `destination_id` | *str* | :heavy_check_mark: | N/A | | +| `destination_type` | *str* | :heavy_check_mark: | N/A | | +| `name` | *str* | :heavy_check_mark: | N/A | | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | | +| `resource_allocation` | [Optional[models.ScopedResourceRequirements]](../models/scopedresourcerequirements.md) | :heavy_minus_sign: | Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level. | | \ No newline at end of file diff --git a/docs/models/destinations3.md b/docs/models/destinations3.md index 0e2dab8f..3c4aef7d 100644 --- a/docs/models/destinations3.md +++ b/docs/models/destinations3.md @@ -3,16 +3,16 @@ ## Fields -| Field | Type | Required | Description | Example | -| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `format` | [models.DestinationS3OutputFormat](../models/destinations3outputformat.md) | :heavy_check_mark: | Format of the data output. See here for more details | | -| `s3_bucket_name` | *str* | :heavy_check_mark: | The name of the S3 bucket. Read more here. | airbyte_sync | -| `s3_bucket_path` | *str* | :heavy_check_mark: | Directory under the S3 bucket where data will be written. Read more here | data_sync/test | -| `access_key_id` | *Optional[str]* | :heavy_minus_sign: | The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here. 
| A012345678910EXAMPLE | -| `destination_type` | [models.S3](../models/s3.md) | :heavy_check_mark: | N/A | | -| `file_name_pattern` | *Optional[str]* | :heavy_minus_sign: | The pattern allows you to set the file-name format for the S3 staging file(s) | {date} | -| `role_arn` | *Optional[str]* | :heavy_minus_sign: | The Role ARN | arn:aws:iam::123456789:role/ExternalIdIsYourWorkspaceId | -| `s3_bucket_region` | [Optional[models.DestinationS3S3BucketRegion]](../models/destinations3s3bucketregion.md) | :heavy_minus_sign: | The region of the S3 bucket. See here for all region codes. | | -| `s3_endpoint` | *Optional[str]* | :heavy_minus_sign: | Your S3 endpoint url. Read more here | http://localhost:9000 | -| `s3_path_format` | *Optional[str]* | :heavy_minus_sign: | Format string on how data will be organized inside the S3 bucket directory. Read more here | ${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_ | -| `secret_access_key` | *Optional[str]* | :heavy_minus_sign: | The corresponding secret to the access key ID. Read more here | a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `format` | [models.DestinationS3OutputFormat](../models/destinations3outputformat.md) | :heavy_check_mark: | Format of the data output. See here for more details | | +| `s3_bucket_name` | *str* | :heavy_check_mark: | The name of the S3 bucket. Read more here. | airbyte_sync | +| `s3_bucket_path` | *str* | :heavy_check_mark: | Directory under the S3 bucket where data will be written. Read more here | data_sync/test | +| `access_key_id` | *Optional[str]* | :heavy_minus_sign: | The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here. | A012345678910EXAMPLE | +| `destination_type` | [models.S3](../models/s3.md) | :heavy_check_mark: | N/A | | +| `file_name_pattern` | *Optional[str]* | :heavy_minus_sign: | Pattern to match file names in the bucket directory. Read more here | {date} | +| `role_arn` | *Optional[str]* | :heavy_minus_sign: | The ARN of the AWS role to assume. Only usable in Airbyte Cloud. 
| arn:aws:iam::123456789:role/ExternalIdIsYourWorkspaceId | +| `s3_bucket_region` | [Optional[models.DestinationS3S3BucketRegion]](../models/destinations3s3bucketregion.md) | :heavy_minus_sign: | The region of the S3 bucket. See here for all region codes. | us-east-1 | +| `s3_endpoint` | *Optional[str]* | :heavy_minus_sign: | Your S3 endpoint url. Read more here | http://localhost:9000 | +| `s3_path_format` | *Optional[str]* | :heavy_minus_sign: | Format string on how data will be organized inside the bucket directory. Read more here | ${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_ | +| `secret_access_key` | *Optional[str]* | :heavy_minus_sign: | The corresponding secret to the access key ID. Read more here | a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY | \ No newline at end of file diff --git a/docs/models/destinations3avroapacheavro.md b/docs/models/destinations3avroapacheavro.md index 9770c648..c230081d 100644 --- a/docs/models/destinations3avroapacheavro.md +++ b/docs/models/destinations3avroapacheavro.md @@ -6,4 +6,5 @@ | Field | Type | Required | Description | | ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | | `compression_codec` | [models.DestinationS3CompressionCodec](../models/destinations3compressioncodec.md) | :heavy_check_mark: | The compression algorithm used to compress data. Default to no compression. | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | `format_type` | [Optional[models.DestinationS3SchemasFormatFormatType]](../models/destinations3schemasformatformattype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinations3bzip2.md b/docs/models/destinations3bzip2.md index 0e064bac..f602d3c7 100644 --- a/docs/models/destinations3bzip2.md +++ b/docs/models/destinations3bzip2.md @@ -5,4 +5,5 @@ | Field | Type | Required | Description | | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | `codec` | [Optional[models.DestinationS3SchemasFormatCodec]](../models/destinations3schemasformatcodec.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinations3compression.md b/docs/models/destinations3compression.md index d82c6d23..1a958c0a 100644 --- a/docs/models/destinations3compression.md +++ b/docs/models/destinations3compression.md @@ -1,6 +1,6 @@ # DestinationS3Compression -Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). +Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). 
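(The members of this union are not shown in this hunk; judging from the `destinations3nocompression.md` and `destinations3gzip.md` docs edited later in this diff, a hedged sketch of selecting one — the member model names are inferred from those filenames, not confirmed here — might look like:)

```python
from airbyte_api import models

# Assumed member name, inferred from docs/models/destinations3gzip.md below;
# compressed output gains the extra extension noted in the description above.
compression = models.DestinationS3GZIP()
```
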
## Supported Types diff --git a/docs/models/destinations3csvcommaseparatedvalues.md b/docs/models/destinations3csvcommaseparatedvalues.md index f492e072..63cb70e1 100644 --- a/docs/models/destinations3csvcommaseparatedvalues.md +++ b/docs/models/destinations3csvcommaseparatedvalues.md @@ -3,8 +3,9 @@ ## Fields -| Field | Type | Required | Description | -| ---------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------- | -| `compression` | [Optional[models.DestinationS3Compression]](../models/destinations3compression.md) | :heavy_minus_sign: | Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). | -| `flattening` | [Optional[models.DestinationS3Flattening]](../models/destinations3flattening.md) | :heavy_minus_sign: | Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details. | -| `format_type` | [Optional[models.DestinationS3FormatType]](../models/destinations3formattype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------ | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `compression` | [Optional[models.DestinationS3Compression]](../models/destinations3compression.md) | :heavy_minus_sign: | Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). | +| `flattening` | [Optional[models.DestinationS3Flattening]](../models/destinations3flattening.md) | :heavy_minus_sign: | N/A | +| `format_type` | [Optional[models.DestinationS3FormatType]](../models/destinations3formattype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinations3datalake.md b/docs/models/destinations3datalake.md new file mode 100644 index 00000000..7bff6b3e --- /dev/null +++ b/docs/models/destinations3datalake.md @@ -0,0 +1,18 @@ +# DestinationS3DataLake + +Defines the configurations required to connect to an Iceberg catalog, including warehouse location, main branch name, and catalog type specifics. 
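As a rough composition sketch (not part of the generated reference: the import path and all literal values are assumptions; the field, model, and enum names come from the tables that follow in this diff):

```python
from airbyte_api import models

configuration = models.DestinationS3DataLake(
    # Placeholder: one of the models.CatalogType union members
    # (NESSIE, GLUE, or REST), documented outside this hunk.
    catalog_type=...,
    s3_bucket_name="your-bucket",  # hypothetical bucket name
    s3_bucket_region=models.DestinationS3DataLakeS3BucketRegion.US_EAST_1,
    warehouse_location="s3://your-bucket/path/to/store/files/in",
    main_branch_name="main",  # the default branch most query engines expect
)
```
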
+ + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `catalog_type` | [models.CatalogType](../models/catalogtype.md) | :heavy_check_mark: | Specifies the type of Iceberg catalog (e.g., NESSIE, GLUE, REST) and its associated configuration. | | +| `s3_bucket_name` | *str* | :heavy_check_mark: | The name of the S3 bucket that will host the Iceberg data. | | +| `s3_bucket_region` | [models.DestinationS3DataLakeS3BucketRegion](../models/destinations3datalakes3bucketregion.md) | :heavy_check_mark: | The region of the S3 bucket. See here for all region codes. | us-east-1 | +| `warehouse_location` | *str* | :heavy_check_mark: | The root location of the data warehouse used by the Iceberg catalog. Typically includes a bucket name and path within that bucket. For AWS Glue and Nessie, must include the storage protocol (such as "s3://" for Amazon S3). | s3://your-bucket/path/to/store/files/in | +| `access_key_id` | *Optional[str]* | :heavy_minus_sign: | The AWS Access Key ID with permissions for S3 and Glue operations. | | +| `destination_type` | [models.S3DataLake](../models/s3datalake.md) | :heavy_check_mark: | N/A | | +| `main_branch_name` | *Optional[str]* | :heavy_minus_sign: | The primary or default branch name in the catalog. Most query engines will use "main" by default. See Iceberg documentation for more information. | | +| `s3_endpoint` | *Optional[str]* | :heavy_minus_sign: | Your S3 endpoint url. Read more here | | +| `secret_access_key` | *Optional[str]* | :heavy_minus_sign: | The AWS Secret Access Key paired with the Access Key ID for AWS authentication. | | \ No newline at end of file diff --git a/docs/models/destinations3datalakecatalogtype.md b/docs/models/destinations3datalakecatalogtype.md new file mode 100644 index 00000000..09da131a --- /dev/null +++ b/docs/models/destinations3datalakecatalogtype.md @@ -0,0 +1,8 @@ +# DestinationS3DataLakeCatalogType + + +## Values + +| Name | Value | +| ------ | ------ | +| `GLUE` | GLUE | \ No newline at end of file diff --git a/docs/models/destinations3datalakes3bucketregion.md b/docs/models/destinations3datalakes3bucketregion.md new file mode 100644 index 00000000..90238706 --- /dev/null +++ b/docs/models/destinations3datalakes3bucketregion.md @@ -0,0 +1,43 @@ +# DestinationS3DataLakeS3BucketRegion + +The region of the S3 bucket. See here for all region codes. 
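(A minimal usage sketch for the region enum whose values are listed below, assuming the usual generated-enum layout where members subclass `str`; the import path is an assumption:)

```python
from airbyte_api import models

region = models.DestinationS3DataLakeS3BucketRegion.US_EAST_1
assert region == "us-east-1"  # str-backed enum, serializes directly
```
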
+ + +## Values + +| Name | Value | +| ---------------- | ---------------- | +| `UNKNOWN` | | +| `AF_SOUTH_1` | af-south-1 | +| `AP_EAST_1` | ap-east-1 | +| `AP_NORTHEAST_1` | ap-northeast-1 | +| `AP_NORTHEAST_2` | ap-northeast-2 | +| `AP_NORTHEAST_3` | ap-northeast-3 | +| `AP_SOUTH_1` | ap-south-1 | +| `AP_SOUTH_2` | ap-south-2 | +| `AP_SOUTHEAST_1` | ap-southeast-1 | +| `AP_SOUTHEAST_2` | ap-southeast-2 | +| `AP_SOUTHEAST_3` | ap-southeast-3 | +| `AP_SOUTHEAST_4` | ap-southeast-4 | +| `CA_CENTRAL_1` | ca-central-1 | +| `CA_WEST_1` | ca-west-1 | +| `CN_NORTH_1` | cn-north-1 | +| `CN_NORTHWEST_1` | cn-northwest-1 | +| `EU_CENTRAL_1` | eu-central-1 | +| `EU_CENTRAL_2` | eu-central-2 | +| `EU_NORTH_1` | eu-north-1 | +| `EU_SOUTH_1` | eu-south-1 | +| `EU_SOUTH_2` | eu-south-2 | +| `EU_WEST_1` | eu-west-1 | +| `EU_WEST_2` | eu-west-2 | +| `EU_WEST_3` | eu-west-3 | +| `IL_CENTRAL_1` | il-central-1 | +| `ME_CENTRAL_1` | me-central-1 | +| `ME_SOUTH_1` | me-south-1 | +| `SA_EAST_1` | sa-east-1 | +| `US_EAST_1` | us-east-1 | +| `US_EAST_2` | us-east-2 | +| `US_GOV_EAST_1` | us-gov-east-1 | +| `US_GOV_WEST_1` | us-gov-west-1 | +| `US_WEST_1` | us-west-1 | +| `US_WEST_2` | us-west-2 | \ No newline at end of file diff --git a/docs/models/destinations3datalakeschemascatalogtype.md b/docs/models/destinations3datalakeschemascatalogtype.md new file mode 100644 index 00000000..ac216e3d --- /dev/null +++ b/docs/models/destinations3datalakeschemascatalogtype.md @@ -0,0 +1,8 @@ +# DestinationS3DataLakeSchemasCatalogType + + +## Values + +| Name | Value | +| ------ | ------ | +| `REST` | REST | \ No newline at end of file diff --git a/docs/models/destinations3datalakeschemascatalogtypecatalogtype.md b/docs/models/destinations3datalakeschemascatalogtypecatalogtype.md new file mode 100644 index 00000000..823504c5 --- /dev/null +++ b/docs/models/destinations3datalakeschemascatalogtypecatalogtype.md @@ -0,0 +1,8 @@ +# DestinationS3DataLakeSchemasCatalogTypeCatalogType + + +## Values + +| Name | Value | +| -------- | -------- | +| `NESSIE` | NESSIE | \ No newline at end of file diff --git a/docs/models/destinations3deflate.md b/docs/models/destinations3deflate.md index a61cc431..15739f91 100644 --- a/docs/models/destinations3deflate.md +++ b/docs/models/destinations3deflate.md @@ -5,5 +5,6 @@ | Field | Type | Required | Description | | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | -| `codec` | [Optional[models.DestinationS3SchemasCodec]](../models/destinations3schemascodec.md) | :heavy_minus_sign: | N/A | -| `compression_level` | *Optional[int]* | :heavy_minus_sign: | 0: no compression & fastest, 9: best compression & slowest. 
| \ No newline at end of file +| `compression_level` | *int* | :heavy_check_mark: | N/A | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `codec` | [Optional[models.DestinationS3SchemasCodec]](../models/destinations3schemascodec.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinations3flattening.md b/docs/models/destinations3flattening.md index e3838f1b..dcc514c0 100644 --- a/docs/models/destinations3flattening.md +++ b/docs/models/destinations3flattening.md @@ -1,7 +1,5 @@ # DestinationS3Flattening -Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details. - ## Values diff --git a/docs/models/destinations3gluecompression.md b/docs/models/destinations3gluecompression.md deleted file mode 100644 index 29df7693..00000000 --- a/docs/models/destinations3gluecompression.md +++ /dev/null @@ -1,19 +0,0 @@ -# DestinationS3GlueCompression - -Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). - - -## Supported Types - -### `models.DestinationS3GlueNoCompression` - -```python -value: models.DestinationS3GlueNoCompression = /* values here */ -``` - -### `models.DestinationS3GlueGZIP` - -```python -value: models.DestinationS3GlueGZIP = /* values here */ -``` - diff --git a/docs/models/destinations3glueoutputformat.md b/docs/models/destinations3glueoutputformat.md deleted file mode 100644 index abf1cc4f..00000000 --- a/docs/models/destinations3glueoutputformat.md +++ /dev/null @@ -1,13 +0,0 @@ -# DestinationS3GlueOutputFormat - -Format of the data output. See here for more details - - -## Supported Types - -### `models.DestinationS3GlueJSONLinesNewlineDelimitedJSON` - -```python -value: models.DestinationS3GlueJSONLinesNewlineDelimitedJSON = /* values here */ -``` - diff --git a/docs/models/destinations3glueschemascompressiontype.md b/docs/models/destinations3glueschemascompressiontype.md deleted file mode 100644 index f04b3e8a..00000000 --- a/docs/models/destinations3glueschemascompressiontype.md +++ /dev/null @@ -1,8 +0,0 @@ -# DestinationS3GlueSchemasCompressionType - - -## Values - -| Name | Value | -| ------ | ------ | -| `GZIP` | GZIP | \ No newline at end of file diff --git a/docs/models/destinations3gzip.md b/docs/models/destinations3gzip.md index 65ed095c..10a5f387 100644 --- a/docs/models/destinations3gzip.md +++ b/docs/models/destinations3gzip.md @@ -5,4 +5,5 @@ | Field | Type | Required | Description | | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | `compression_type` | [Optional[models.DestinationS3SchemasCompressionType]](../models/destinations3schemascompressiontype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinations3jsonlinesnewlinedelimitedjson.md b/docs/models/destinations3jsonlinesnewlinedelimitedjson.md index f973238d..78b00e8f 100644 --- a/docs/models/destinations3jsonlinesnewlinedelimitedjson.md +++ b/docs/models/destinations3jsonlinesnewlinedelimitedjson.md @@ -5,6 
+5,7 @@ | Field | Type | Required | Description | | ------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------ | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | `compression` | [Optional[models.DestinationS3SchemasCompression]](../models/destinations3schemascompression.md) | :heavy_minus_sign: | Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). | -| `flattening` | [Optional[models.DestinationS3SchemasFlattening]](../models/destinations3schemasflattening.md) | :heavy_minus_sign: | Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details. | +| `flattening` | [Optional[models.DestinationS3SchemasFlattening]](../models/destinations3schemasflattening.md) | :heavy_minus_sign: | N/A | | `format_type` | [Optional[models.DestinationS3SchemasFormatType]](../models/destinations3schemasformattype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinations3nocompression.md b/docs/models/destinations3nocompression.md index 545c0171..88912ca2 100644 --- a/docs/models/destinations3nocompression.md +++ b/docs/models/destinations3nocompression.md @@ -5,4 +5,5 @@ | Field | Type | Required | Description | | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | `compression_type` | [Optional[models.DestinationS3CompressionType]](../models/destinations3compressiontype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinations3parquetcolumnarstorage.md b/docs/models/destinations3parquetcolumnarstorage.md index 84706181..ac05c933 100644 --- a/docs/models/destinations3parquetcolumnarstorage.md +++ b/docs/models/destinations3parquetcolumnarstorage.md @@ -3,12 +3,13 @@ ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `block_size_mb` | *Optional[int]* | :heavy_minus_sign: | This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB. | 128 | -| `compression_codec` | [Optional[models.DestinationS3SchemasCompressionCodec]](../models/destinations3schemascompressioncodec.md) | :heavy_minus_sign: | The compression algorithm used to compress data pages. | | -| `dictionary_encoding` | *Optional[bool]* | :heavy_minus_sign: | Default: true. | | -| `dictionary_page_size_kb` | *Optional[int]* | :heavy_minus_sign: | There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB. | 1024 | -| `format_type` | [Optional[models.DestinationS3SchemasFormatOutputFormatFormatType]](../models/destinations3schemasformatoutputformatformattype.md) | :heavy_minus_sign: | N/A | | -| `max_padding_size_mb` | *Optional[int]* | :heavy_minus_sign: | Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB. | 8 | -| `page_size_kb` | *Optional[int]* | :heavy_minus_sign: | The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB. | 1024 | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `block_size_mb` | *Optional[int]* | :heavy_minus_sign: | This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB. | +| `compression_codec` | [Optional[models.DestinationS3SchemasCompressionCodec]](../models/destinations3schemascompressioncodec.md) | :heavy_minus_sign: | The compression algorithm used to compress data pages. | +| `dictionary_encoding` | *Optional[bool]* | :heavy_minus_sign: | Default: true. 
| +| `dictionary_page_size_kb` | *Optional[int]* | :heavy_minus_sign: | There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB. | +| `format_type` | [Optional[models.DestinationS3SchemasFormatOutputFormatFormatType]](../models/destinations3schemasformatoutputformatformattype.md) | :heavy_minus_sign: | N/A | +| `max_padding_size_mb` | *Optional[int]* | :heavy_minus_sign: | Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB. | +| `page_size_kb` | *Optional[int]* | :heavy_minus_sign: | The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB. | \ No newline at end of file diff --git a/docs/models/destinations3schemasflattening.md b/docs/models/destinations3schemasflattening.md index 7b930b9c..a298338f 100644 --- a/docs/models/destinations3schemasflattening.md +++ b/docs/models/destinations3schemasflattening.md @@ -1,7 +1,5 @@ # DestinationS3SchemasFlattening -Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details. - ## Values diff --git a/docs/models/destinations3schemasformatnocompression.md b/docs/models/destinations3schemasformatnocompression.md index 4d39eba8..f5025c44 100644 --- a/docs/models/destinations3schemasformatnocompression.md +++ b/docs/models/destinations3schemasformatnocompression.md @@ -5,4 +5,5 @@ | Field | Type | Required | Description | | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | `codec` | [Optional[models.DestinationS3Codec]](../models/destinations3codec.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinations3schemasgzip.md b/docs/models/destinations3schemasgzip.md index 28783bb7..16fccd23 100644 --- a/docs/models/destinations3schemasgzip.md +++ b/docs/models/destinations3schemasgzip.md @@ -5,4 +5,5 @@ | Field | Type | Required | Description | | -------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | `compression_type` | [Optional[models.DestinationS3SchemasFormatOutputFormatCompressionType]](../models/destinations3schemasformatoutputformatcompressiontype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinations3schemasnocompression.md b/docs/models/destinations3schemasnocompression.md index d69da461..a10f5814 100644 --- a/docs/models/destinations3schemasnocompression.md +++ 
b/docs/models/destinations3schemasnocompression.md @@ -5,4 +5,5 @@ | Field | Type | Required | Description | | -------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | `compression_type` | [Optional[models.DestinationS3SchemasFormatCompressionType]](../models/destinations3schemasformatcompressiontype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinations3snappy.md b/docs/models/destinations3snappy.md index 81bc4802..0c2f35e6 100644 --- a/docs/models/destinations3snappy.md +++ b/docs/models/destinations3snappy.md @@ -5,4 +5,5 @@ | Field | Type | Required | Description | | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | `codec` | [Optional[models.DestinationS3SchemasFormatOutputFormat3CompressionCodecCodec]](../models/destinations3schemasformatoutputformat3compressioncodeccodec.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinations3xz.md b/docs/models/destinations3xz.md index fd5789ad..d8ed2300 100644 --- a/docs/models/destinations3xz.md +++ b/docs/models/destinations3xz.md @@ -3,7 +3,8 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `codec` | [Optional[models.DestinationS3SchemasFormatOutputFormatCodec]](../models/destinations3schemasformatoutputformatcodec.md) | :heavy_minus_sign: | N/A | -| `compression_level` | *Optional[int]* | :heavy_minus_sign: | See here for details. 
| \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------ | +| `compression_level` | *int* | :heavy_check_mark: | N/A | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `codec` | [Optional[models.DestinationS3SchemasFormatOutputFormatCodec]](../models/destinations3schemasformatoutputformatcodec.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinations3zstandard.md b/docs/models/destinations3zstandard.md index 598eab0c..02b1e22f 100644 --- a/docs/models/destinations3zstandard.md +++ b/docs/models/destinations3zstandard.md @@ -3,8 +3,9 @@ ## Fields -| Field | Type | Required | Description | -| ---------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------- | -| `codec` | [Optional[models.DestinationS3SchemasFormatOutputFormat3Codec]](../models/destinations3schemasformatoutputformat3codec.md) | :heavy_minus_sign: | N/A | -| `compression_level` | *Optional[int]* | :heavy_minus_sign: | Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory. | -| `include_checksum` | *Optional[bool]* | :heavy_minus_sign: | If true, include a checksum with each data block. 
| \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- | +| `compression_level` | *int* | :heavy_check_mark: | N/A | +| `include_checksum` | *bool* | :heavy_check_mark: | N/A | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `codec` | [Optional[models.DestinationS3SchemasFormatOutputFormat3Codec]](../models/destinations3schemasformatoutputformat3codec.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinationsalesforce.md b/docs/models/destinationsalesforce.md new file mode 100644 index 00000000..6b99b1c2 --- /dev/null +++ b/docs/models/destinationsalesforce.md @@ -0,0 +1,14 @@ +# DestinationSalesforce + + +## Fields + +| Field | Type | Required | Description | +| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `client_id` | *str* | :heavy_check_mark: | Enter your Salesforce developer application's Client ID. | +| `client_secret` | *str* | :heavy_check_mark: | Enter your Salesforce developer application's Client secret. | +| `refresh_token` | *str* | :heavy_check_mark: | Enter your application's Salesforce Refresh Token used for Airbyte to access your Salesforce account. | +| `auth_type` | [models.AuthType](../models/authtype.md) | :heavy_check_mark: | N/A | +| `destination_type` | [models.DestinationSalesforceSalesforce](../models/destinationsalesforcesalesforce.md) | :heavy_check_mark: | N/A | +| `is_sandbox` | *Optional[bool]* | :heavy_minus_sign: | Toggle if you're using a Salesforce Sandbox. 
| +| `object_storage_config` | [Optional[models.DestinationSalesforceObjectStorageConfiguration]](../models/destinationsalesforceobjectstorageconfiguration.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinations3gluenocompression.md b/docs/models/destinationsalesforcenone.md similarity index 62% rename from docs/models/destinations3gluenocompression.md rename to docs/models/destinationsalesforcenone.md index 719b810f..7a51605e 100644 --- a/docs/models/destinations3gluenocompression.md +++ b/docs/models/destinationsalesforcenone.md @@ -1,8 +1,9 @@ -# DestinationS3GlueNoCompression +# DestinationSalesforceNone ## Fields | Field | Type | Required | Description | | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -| `compression_type` | [Optional[models.DestinationS3GlueCompressionType]](../models/destinations3gluecompressiontype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `storage_type` | [Optional[models.DestinationSalesforceStorageType]](../models/destinationsalesforcestoragetype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinationsalesforceobjectstorageconfiguration.md b/docs/models/destinationsalesforceobjectstorageconfiguration.md new file mode 100644 index 00000000..d72cd7d8 --- /dev/null +++ b/docs/models/destinationsalesforceobjectstorageconfiguration.md @@ -0,0 +1,17 @@ +# DestinationSalesforceObjectStorageConfiguration + + +## Supported Types + +### `models.DestinationSalesforceNone` + +```python +value: models.DestinationSalesforceNone = /* values here */ +``` + +### `models.DestinationSalesforceS3` + +```python +value: models.DestinationSalesforceS3 = /* values here */ +``` + diff --git a/docs/models/destinationsalesforces3.md b/docs/models/destinationsalesforces3.md new file mode 100644 index 00000000..11db5fce --- /dev/null +++ b/docs/models/destinationsalesforces3.md @@ -0,0 +1,16 @@ +# DestinationSalesforceS3 + + +## Fields + +| Field | Type | Required | Description | Example | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `bucket_path` | *str* | :heavy_check_mark: | All files in the bucket will be prefixed by this. | prefix/ | +| `s3_bucket_name` | *str* | :heavy_check_mark: | The name of the S3 bucket. Read more here. | airbyte_sync | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | +| `access_key_id` | *Optional[str]* | :heavy_minus_sign: | The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here. | A012345678910EXAMPLE | +| `role_arn` | *Optional[str]* | :heavy_minus_sign: | The ARN of the AWS role to assume. Only usable in Airbyte Cloud. | arn:aws:iam::123456789:role/ExternalIdIsYourWorkspaceId | +| `s3_bucket_region` | [Optional[models.DestinationSalesforceS3BucketRegion]](../models/destinationsalesforces3bucketregion.md) | :heavy_minus_sign: | The region of the S3 bucket. See here for all region codes. | us-east-1 | +| `s3_endpoint` | *Optional[str]* | :heavy_minus_sign: | Your S3 endpoint url. Read more here | http://localhost:9000 | +| `secret_access_key` | *Optional[str]* | :heavy_minus_sign: | The corresponding secret to the access key ID. Read more here | a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY | +| `storage_type` | [Optional[models.DestinationSalesforceSchemasStorageType]](../models/destinationsalesforceschemasstoragetype.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/destinationsalesforces3bucketregion.md b/docs/models/destinationsalesforces3bucketregion.md new file mode 100644 index 00000000..690b1015 --- /dev/null +++ b/docs/models/destinationsalesforces3bucketregion.md @@ -0,0 +1,43 @@ +# DestinationSalesforceS3BucketRegion + +The region of the S3 bucket. See here for all region codes. 
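As a quick usage sketch (not generated output), the new Salesforce destination models above can be wired together as follows, using one of the region codes listed below. The credential strings are placeholders, and `auth_type`/`destination_type` are assumed to be generated constants with defaults in the SDK models:

```python
from airbyte_api import models

# Hypothetical credential values; auth_type and destination_type are assumed
# to be generated constants that default correctly when omitted.
config = models.DestinationSalesforce(
    client_id="<client-id>",
    client_secret="<client-secret>",
    refresh_token="<refresh-token>",
    is_sandbox=False,
    object_storage_config=models.DestinationSalesforceS3(
        bucket_path="prefix/",          # example value from the table above
        s3_bucket_name="airbyte_sync",  # example value from the table above
        s3_bucket_region=models.DestinationSalesforceS3BucketRegion.US_EAST_1,
    ),
)
```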
+ + +## Values + +| Name | Value | +| ---------------- | ---------------- | +| `UNKNOWN` | | +| `AF_SOUTH_1` | af-south-1 | +| `AP_EAST_1` | ap-east-1 | +| `AP_NORTHEAST_1` | ap-northeast-1 | +| `AP_NORTHEAST_2` | ap-northeast-2 | +| `AP_NORTHEAST_3` | ap-northeast-3 | +| `AP_SOUTH_1` | ap-south-1 | +| `AP_SOUTH_2` | ap-south-2 | +| `AP_SOUTHEAST_1` | ap-southeast-1 | +| `AP_SOUTHEAST_2` | ap-southeast-2 | +| `AP_SOUTHEAST_3` | ap-southeast-3 | +| `AP_SOUTHEAST_4` | ap-southeast-4 | +| `CA_CENTRAL_1` | ca-central-1 | +| `CA_WEST_1` | ca-west-1 | +| `CN_NORTH_1` | cn-north-1 | +| `CN_NORTHWEST_1` | cn-northwest-1 | +| `EU_CENTRAL_1` | eu-central-1 | +| `EU_CENTRAL_2` | eu-central-2 | +| `EU_NORTH_1` | eu-north-1 | +| `EU_SOUTH_1` | eu-south-1 | +| `EU_SOUTH_2` | eu-south-2 | +| `EU_WEST_1` | eu-west-1 | +| `EU_WEST_2` | eu-west-2 | +| `EU_WEST_3` | eu-west-3 | +| `IL_CENTRAL_1` | il-central-1 | +| `ME_CENTRAL_1` | me-central-1 | +| `ME_SOUTH_1` | me-south-1 | +| `SA_EAST_1` | sa-east-1 | +| `US_EAST_1` | us-east-1 | +| `US_EAST_2` | us-east-2 | +| `US_GOV_EAST_1` | us-gov-east-1 | +| `US_GOV_WEST_1` | us-gov-west-1 | +| `US_WEST_1` | us-west-1 | +| `US_WEST_2` | us-west-2 | \ No newline at end of file diff --git a/docs/models/destinationsalesforcesalesforce.md b/docs/models/destinationsalesforcesalesforce.md new file mode 100644 index 00000000..24f2f4e8 --- /dev/null +++ b/docs/models/destinationsalesforcesalesforce.md @@ -0,0 +1,8 @@ +# DestinationSalesforceSalesforce + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `SALESFORCE` | salesforce | \ No newline at end of file diff --git a/docs/models/destinationsalesforceschemasstoragetype.md b/docs/models/destinationsalesforceschemasstoragetype.md new file mode 100644 index 00000000..ec15df9c --- /dev/null +++ b/docs/models/destinationsalesforceschemasstoragetype.md @@ -0,0 +1,8 @@ +# DestinationSalesforceSchemasStorageType + + +## Values + +| Name | Value | +| ----- | ----- | +| `S3` | S3 | \ No newline at end of file diff --git a/docs/models/destinationsalesforcestoragetype.md b/docs/models/destinationsalesforcestoragetype.md new file mode 100644 index 00000000..6a9ce704 --- /dev/null +++ b/docs/models/destinationsalesforcestoragetype.md @@ -0,0 +1,8 @@ +# DestinationSalesforceStorageType + + +## Values + +| Name | Value | +| ------ | ------ | +| `NONE` | None | \ No newline at end of file diff --git a/docs/models/destinationsnowflake.md b/docs/models/destinationsnowflake.md index fe819fb4..0081ac0e 100644 --- a/docs/models/destinationsnowflake.md +++ b/docs/models/destinationsnowflake.md @@ -12,7 +12,7 @@ | `username` | *str* | :heavy_check_mark: | Enter the name of the user you want to use to access the database | AIRBYTE_USER | | `warehouse` | *str* | :heavy_check_mark: | Enter the name of the warehouse that you want to use as a compute cluster | AIRBYTE_WAREHOUSE | | `credentials` | [Optional[models.AuthorizationMethod]](../models/authorizationmethod.md) | :heavy_minus_sign: | N/A | | -| `destination_type` | [models.DestinationSnowflakeSnowflake](../models/destinationsnowflakesnowflake.md) | :heavy_check_mark: | N/A | | +| `destination_type` | [models.Snowflake](../models/snowflake.md) | :heavy_check_mark: | N/A | | | `disable_type_dedupe` | *Optional[bool]* | :heavy_minus_sign: | Disable Writing Final Tables. WARNING! 
The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions | | | `jdbc_url_params` | *Optional[str]* | :heavy_minus_sign: | Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3 | | | `raw_data_schema` | *Optional[str]* | :heavy_minus_sign: | The schema to write raw tables into (default: airbyte_internal) | | diff --git a/docs/models/destinationsurrealdb.md b/docs/models/destinationsurrealdb.md new file mode 100644 index 00000000..3b56ab33 --- /dev/null +++ b/docs/models/destinationsurrealdb.md @@ -0,0 +1,13 @@ +# DestinationSurrealdb + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------ | ------------------------------------------ | ------------------------------------------ | ------------------------------------------ | +| `surrealdb_password` | *str* | :heavy_check_mark: | The password to use in SurrealDB. | +| `surrealdb_url` | *str* | :heavy_check_mark: | The URL of the SurrealDB instance. | +| `surrealdb_username` | *str* | :heavy_check_mark: | The username to use in SurrealDB. | +| `destination_type` | [models.Surrealdb](../models/surrealdb.md) | :heavy_check_mark: | N/A | +| `surrealdb_database` | *Optional[str]* | :heavy_minus_sign: | The database to use in SurrealDB. | +| `surrealdb_namespace` | *Optional[str]* | :heavy_minus_sign: | The namespace to use in SurrealDB. | \ No newline at end of file diff --git a/docs/models/destinationteradata.md b/docs/models/destinationteradata.md index d898b1ce..fc1e636f 100644 --- a/docs/models/destinationteradata.md +++ b/docs/models/destinationteradata.md @@ -6,10 +6,13 @@ | Field | Type | Required | Description | Example | | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `host` | *str* | :heavy_check_mark: | Hostname of the database. | |
-| `username` | *str* | :heavy_check_mark: | Username to use to access the database. | |
| `destination_type` | [models.Teradata](../models/teradata.md) | :heavy_check_mark: | N/A | |
+| `disable_type_dedupe` | *Optional[bool]* | :heavy_minus_sign: | Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions | |
+| `drop_cascade` | *Optional[bool]* | :heavy_minus_sign: | Drop tables with CASCADE. WARNING! This will delete all data in all dependent objects (views, etc.). Use with caution. This option is intended for use cases which can easily rebuild the dependent objects. | |
| `jdbc_url_params` | *Optional[str]* | :heavy_minus_sign: | Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). | |
-| `password` | *Optional[str]* | :heavy_minus_sign: | Password associated with the username. | |
+| `logmech` | [Optional[models.AuthorizationMechanism]](../models/authorizationmechanism.md) | :heavy_minus_sign: | N/A | |
+| `query_band` | *Optional[str]* | :heavy_minus_sign: | Defines the custom session query band using name-value pairs. For example, 'org=Finance;report=Fin123;' | |
+| `raw_data_schema` | *Optional[str]* | :heavy_minus_sign: | The database to write raw tables into | |
| `schema` | *Optional[str]* | :heavy_minus_sign: | The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public". | airbyte_td |
-| `ssl` | *Optional[bool]* | :heavy_minus_sign: | Encrypt data using SSL. When activating SSL, please select one of the connection modes. | |
+| `ssl` | *Optional[bool]* | :heavy_minus_sign: | Encrypt data using SSL. When activating SSL, please select one of the SSL modes. | |
| `ssl_mode` | [Optional[models.DestinationTeradataSSLModes]](../models/destinationteradatasslmodes.md) | :heavy_minus_sign: | SSL connection modes.
disable - Choose this mode to disable encryption of communication between Airbyte and destination database
allow - Choose this mode to enable encryption only when required by the destination database
prefer - Choose this mode to allow unencrypted connection only if the destination database does not support encryption
require - Choose this mode to always require encryption. If the destination database server does not support encryption, connection will fail
verify-ca - Choose this mode to always require encryption and to verify that the destination database server has a valid SSL certificate
verify-full - This is the most secure mode. Choose this mode to always require encryption and to verify the identity of the destination database server
See more information - in the docs. | | \ No newline at end of file diff --git a/docs/models/destinationteradataauthtype.md b/docs/models/destinationteradataauthtype.md new file mode 100644 index 00000000..1370350d --- /dev/null +++ b/docs/models/destinationteradataauthtype.md @@ -0,0 +1,8 @@ +# DestinationTeradataAuthType + + +## Values + +| Name | Value | +| ----- | ----- | +| `TD2` | TD2 | \ No newline at end of file diff --git a/docs/models/destinationteradataschemasauthtype.md b/docs/models/destinationteradataschemasauthtype.md new file mode 100644 index 00000000..718b15f2 --- /dev/null +++ b/docs/models/destinationteradataschemasauthtype.md @@ -0,0 +1,8 @@ +# DestinationTeradataSchemasAuthType + + +## Values + +| Name | Value | +| ------ | ------ | +| `LDAP` | LDAP | \ No newline at end of file diff --git a/docs/models/destinationtypesense.md b/docs/models/destinationtypesense.md index 3f170764..c06b613e 100644 --- a/docs/models/destinationtypesense.md +++ b/docs/models/destinationtypesense.md @@ -10,5 +10,5 @@ | `batch_size` | *Optional[int]* | :heavy_minus_sign: | How many documents should be imported together. Default 1000 | | `destination_type` | [models.Typesense](../models/typesense.md) | :heavy_check_mark: | N/A | | `path` | *Optional[str]* | :heavy_minus_sign: | Path of the Typesense instance. Default is none | -| `port` | *Optional[str]* | :heavy_minus_sign: | Port of the Typesense instance. Ex: 8108, 80, 443. Default is 443 | +| `port` | *Optional[str]* | :heavy_minus_sign: | Port of the Typesense instance. Ex: 8108, 80, 443. Default is 8108 | | `protocol` | *Optional[str]* | :heavy_minus_sign: | Protocol of the Typesense instance. Ex: http or https. Default is https | \ No newline at end of file diff --git a/docs/models/dingconnect.md b/docs/models/dingconnect.md new file mode 100644 index 00000000..f47e81f0 --- /dev/null +++ b/docs/models/dingconnect.md @@ -0,0 +1,8 @@ +# DingConnect + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `DING_CONNECT` | ding-connect | \ No newline at end of file diff --git a/docs/models/disable.md b/docs/models/disable.md index bb2746fb..a4c5a6f9 100644 --- a/docs/models/disable.md +++ b/docs/models/disable.md @@ -5,6 +5,6 @@ Disable SSL. 
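For the updated Teradata destination documented a few hunks above, a minimal construction sketch; only `host` is strictly required by the fields table, `destination_type` is assumed to be a generated constant, and `logmech`/`ssl_mode` accept the authorization and SSL-mode models described above:

```python
from airbyte_api import models

# Values are illustrative; optional fields may simply be omitted.
config = models.DestinationTeradata(
    host="my-host.example.teradata.com",      # hypothetical hostname
    schema="airbyte_td",                      # example value from the docs table
    query_band="org=Finance;report=Fin123;",  # example value from the docs table
    ssl=True,                                 # pair with one of the SSL modes above
)
```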
## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------ | -| `mode` | [Optional[models.DestinationPostgresSchemasSSLModeSSLModes1Mode]](../models/destinationpostgresschemassslmodesslmodes1mode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | +| `mode` | [Optional[models.DestinationPostgresMode]](../models/destinationpostgresmode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/disabled.md b/docs/models/disabled.md index f5a60c63..fcadfd71 100644 --- a/docs/models/disabled.md +++ b/docs/models/disabled.md @@ -3,6 +3,6 @@ ## Fields -| Field | Type | Required | Description | -| ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | -| `deletion_mode` | [models.SourceFaunaDeletionMode](../models/sourcefaunadeletionmode.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | +| `deletion_mode` | [models.SourceFaunaSchemasDeletionMode](../models/sourcefaunaschemasdeletionmode.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/docuseal.md b/docs/models/docuseal.md new file mode 100644 index 00000000..aa321dcf --- /dev/null +++ b/docs/models/docuseal.md @@ -0,0 +1,8 @@ +# Docuseal + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `DOCUSEAL` | docuseal | \ No newline at end of file diff --git a/docs/models/dolibarr.md b/docs/models/dolibarr.md new file mode 100644 index 00000000..d69601a4 --- /dev/null +++ b/docs/models/dolibarr.md @@ -0,0 +1,8 @@ +# Dolibarr + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `DOLIBARR` | dolibarr | \ No newline at end of file diff --git a/docs/models/destinationicebergcatalogtype.md b/docs/models/dwolla.md similarity index 52% rename from docs/models/destinationicebergcatalogtype.md rename to docs/models/dwolla.md index c84f4220..08505914 100644 --- a/docs/models/destinationicebergcatalogtype.md +++ b/docs/models/dwolla.md @@ -1,8 +1,8 @@ -# DestinationIcebergCatalogType +# Dwolla ## Values | Name | Value | | -------- | -------- | -| `HADOOP` | Hadoop 
| \ No newline at end of file +| `DWOLLA` | dwolla | \ No newline at end of file diff --git a/docs/models/ebayfinance.md b/docs/models/ebayfinance.md new file mode 100644 index 00000000..f1730b4d --- /dev/null +++ b/docs/models/ebayfinance.md @@ -0,0 +1,8 @@ +# EbayFinance + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `EBAY_FINANCE` | ebay-finance | \ No newline at end of file diff --git a/docs/models/ebayfulfillment.md b/docs/models/ebayfulfillment.md new file mode 100644 index 00000000..0ee9173b --- /dev/null +++ b/docs/models/ebayfulfillment.md @@ -0,0 +1,8 @@ +# EbayFulfillment + + +## Values + +| Name | Value | +| ------------------ | ------------------ | +| `EBAY_FULFILLMENT` | ebay-fulfillment | \ No newline at end of file diff --git a/docs/models/emailnotificationconfig.md b/docs/models/emailnotificationconfig.md new file mode 100644 index 00000000..138b48e9 --- /dev/null +++ b/docs/models/emailnotificationconfig.md @@ -0,0 +1,10 @@ +# EmailNotificationConfig + +Configures an email notification. + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `enabled` | *Optional[bool]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/enabled.md b/docs/models/enabled.md index 07a0b05c..f9ef47b8 100644 --- a/docs/models/enabled.md +++ b/docs/models/enabled.md @@ -3,7 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | -| `column` | *Optional[str]* | :heavy_minus_sign: | Name of the "deleted at" column. | -| `deletion_mode` | [models.SourceFaunaSchemasDeletionMode](../models/sourcefaunaschemasdeletionmode.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | +| `column` | *Optional[str]* | :heavy_minus_sign: | Name of the "deleted at" column. | +| `deletion_mode` | [models.SourceFaunaDeletionMode](../models/sourcefaunadeletionmode.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/encryptedtrustservercertificate.md b/docs/models/encryptedtrustservercertificate.md index 588f841b..3114a2db 100644 --- a/docs/models/encryptedtrustservercertificate.md +++ b/docs/models/encryptedtrustservercertificate.md @@ -5,6 +5,7 @@ Use the certificate provided by the server without verification. 
(For testing purposes only!)

## Fields

-| Field | Type | Required | Description |
-| ----------- | ----------- | ----------- | ----------- |
-| `ssl_method` | [Optional[models.DestinationMssqlSchemasSslMethod]](../models/destinationmssqlschemassslmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file
+| Field | Type | Required | Description |
+| ----------- | ----------- | ----------- | ----------- |
+| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A |
+| `name` | [Optional[models.DestinationMssqlName]](../models/destinationmssqlname.md) | :heavy_minus_sign: | N/A | \ No newline at end of file
diff --git a/docs/models/encryptedverifycertificate.md b/docs/models/encryptedverifycertificate.md
index 7b93ad84..beccad7d 100644
--- a/docs/models/encryptedverifycertificate.md
+++ b/docs/models/encryptedverifycertificate.md
@@ -7,5 +7,8 @@ Verify and use the certificate provided by the server.

| Field | Type | Required | Description |
| ----------- | ----------- | ----------- | ----------- |
+| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A |
| `host_name_in_certificate` | *Optional[str]* | :heavy_minus_sign: | Specifies the host name of the server. The value of this property must match the subject property of the certificate. |
-| `ssl_method` | [Optional[models.DestinationMssqlSchemasSslMethodSslMethod]](../models/destinationmssqlschemassslmethodsslmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file
+| `name` | [Optional[models.DestinationMssqlSchemasName]](../models/destinationmssqlschemasname.md) | :heavy_minus_sign: | N/A |
+| `trust_store_name` | *Optional[str]* | :heavy_minus_sign: | Specifies the name of the trust store. |
+| `trust_store_password` | *Optional[str]* | :heavy_minus_sign: | Specifies the password of the trust store. | \ No newline at end of file
diff --git a/docs/models/encryption.md b/docs/models/encryption.md
index 9156def2..e2f07ac4 100644
--- a/docs/models/encryption.md
+++ b/docs/models/encryption.md
@@ -1,25 +1,17 @@
 # Encryption

-The encryption method which is used when communicating with the database.
- ## Supported Types -### `models.DestinationOracleUnencrypted` - -```python -value: models.DestinationOracleUnencrypted = /* values here */ -``` - -### `models.NativeNetworkEncryptionNNE` +### `models.EncryptionAES` ```python -value: models.NativeNetworkEncryptionNNE = /* values here */ +value: models.EncryptionAES = /* values here */ ``` -### `models.TLSEncryptedVerifyCertificate` +### `models.EncryptionRSA` ```python -value: models.TLSEncryptedVerifyCertificate = /* values here */ +value: models.EncryptionRSA = /* values here */ ``` diff --git a/docs/models/encryptionaes.md b/docs/models/encryptionaes.md new file mode 100644 index 00000000..2159891b --- /dev/null +++ b/docs/models/encryptionaes.md @@ -0,0 +1,13 @@ +# EncryptionAES + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | +| `algorithm` | [models.EncryptionMapperAlgorithm](../models/encryptionmapperalgorithm.md) | :heavy_check_mark: | N/A | +| `field_name_suffix` | *str* | :heavy_check_mark: | N/A | +| `key` | *str* | :heavy_check_mark: | N/A | +| `mode` | [models.Mode](../models/mode.md) | :heavy_check_mark: | N/A | +| `padding` | [models.Padding](../models/padding.md) | :heavy_check_mark: | N/A | +| `target_field` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/encryptionmapperalgorithm.md b/docs/models/encryptionmapperalgorithm.md new file mode 100644 index 00000000..ce19f31c --- /dev/null +++ b/docs/models/encryptionmapperalgorithm.md @@ -0,0 +1,9 @@ +# EncryptionMapperAlgorithm + + +## Values + +| Name | Value | +| ----- | ----- | +| `RSA` | RSA | +| `AES` | AES | \ No newline at end of file diff --git a/docs/models/encryptionrsa.md b/docs/models/encryptionrsa.md new file mode 100644 index 00000000..216a1c38 --- /dev/null +++ b/docs/models/encryptionrsa.md @@ -0,0 +1,11 @@ +# EncryptionRSA + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | +| `algorithm` | [models.EncryptionMapperAlgorithm](../models/encryptionmapperalgorithm.md) | :heavy_check_mark: | N/A | +| `field_name_suffix` | *str* | :heavy_check_mark: | N/A | +| `public_key` | *str* | :heavy_check_mark: | N/A | +| `target_field` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/enterprise.md b/docs/models/enterprise.md index 7ee36ebb..27843470 100644 --- a/docs/models/enterprise.md +++ b/docs/models/enterprise.md @@ -6,4 +6,4 @@ | Field | Type | Required | Description | | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | | 
`enterprise_url` | *str* | :heavy_check_mark: | Upgrade to Enterprise to make your API url your-domain.com/API or subdomain.jotform.com/API instead of api.jotform.com | -| `api_endpoint` | [Optional[models.SourceJotformSchemasAPIEndpoint]](../models/sourcejotformschemasapiendpoint.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| `api_endpoint` | [Optional[models.SourceJotformAPIEndpoint]](../models/sourcejotformapiendpoint.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/equal.md b/docs/models/equal.md new file mode 100644 index 00000000..c84ad844 --- /dev/null +++ b/docs/models/equal.md @@ -0,0 +1,10 @@ +# Equal + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | +| `comparison_value` | *str* | :heavy_check_mark: | The value to compare the field against. | +| `field_name` | *str* | :heavy_check_mark: | The name of the field to apply the operation on. | +| `type` | [models.RowFilteringOperationType](../models/rowfilteringoperationtype.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/everhour.md b/docs/models/everhour.md new file mode 100644 index 00000000..491e10c5 --- /dev/null +++ b/docs/models/everhour.md @@ -0,0 +1,8 @@ +# Everhour + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `EVERHOUR` | everhour | \ No newline at end of file diff --git a/docs/models/destinations3gluecompressiontype.md b/docs/models/facebookpages.md similarity index 55% rename from docs/models/destinations3gluecompressiontype.md rename to docs/models/facebookpages.md index 2f422c23..39373c60 100644 --- a/docs/models/destinations3gluecompressiontype.md +++ b/docs/models/facebookpages.md @@ -1,8 +1,8 @@ -# DestinationS3GlueCompressionType +# FacebookPages ## Values | Name | Value | | ---------------- | ---------------- | -| `NO_COMPRESSION` | No Compression | \ No newline at end of file +| `FACEBOOK_PAGES` | facebook-pages | \ No newline at end of file diff --git a/docs/models/fake.md b/docs/models/fake.md index 393b164c..9b63eddc 100644 --- a/docs/models/fake.md +++ b/docs/models/fake.md @@ -5,6 +5,6 @@ Use a fake embedding made out of random vectors with 1536 embedding dimensions. 
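The new `EncryptionAES` and `EncryptionRSA` mapper models above can be constructed directly from their fields tables. A sketch of the RSA variant with placeholder key material follows; the AES variant is analogous but additionally takes `key`, `mode`, and `padding`, whose enum members are listed on their own pages:

```python
from airbyte_api import models

# Placeholder key material; field names follow the EncryptionRSA table above.
rsa_mapper = models.EncryptionRSA(
    algorithm=models.EncryptionMapperAlgorithm.RSA,
    field_name_suffix="_encrypted",
    public_key="<hex-encoded-public-key>",
    target_field="email",
)
```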
## Fields -| Field | Type | Required | Description | -| ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | -| `mode` | [Optional[models.DestinationAstraSchemasMode]](../models/destinationastraschemasmode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | +| `mode` | [Optional[models.DestinationAstraSchemasEmbeddingMode]](../models/destinationastraschemasembeddingmode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/fastbill.md b/docs/models/fastbill.md new file mode 100644 index 00000000..a26d2d4a --- /dev/null +++ b/docs/models/fastbill.md @@ -0,0 +1,8 @@ +# Fastbill + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `FASTBILL` | fastbill | \ No newline at end of file diff --git a/docs/models/fastly.md b/docs/models/fastly.md new file mode 100644 index 00000000..f6194906 --- /dev/null +++ b/docs/models/fastly.md @@ -0,0 +1,8 @@ +# Fastly + + +## Values + +| Name | Value | +| -------- | -------- | +| `FASTLY` | fastly | \ No newline at end of file diff --git a/docs/models/fieldrenaming.md b/docs/models/fieldrenaming.md new file mode 100644 index 00000000..cd5a558c --- /dev/null +++ b/docs/models/fieldrenaming.md @@ -0,0 +1,9 @@ +# FieldRenaming + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------ | ------------------------------------------ | ------------------------------------------ | ------------------------------------------ | +| `new_field_name` | *str* | :heavy_check_mark: | The new name for the field after renaming. | +| `original_field_name` | *str* | :heavy_check_mark: | The current name of the field to rename. 
| \ No newline at end of file
diff --git a/docs/models/fields.md b/docs/models/fields.md
new file mode 100644
index 00000000..3da34c7e
--- /dev/null
+++ b/docs/models/fields.md
@@ -0,0 +1,39 @@
+# Fields
+
+
+## Values
+
+| Name | Value |
+| ------------------------------ | ------------------------------ |
+| `CLICKS` | CLICKS |
+| `COMPLETES` | COMPLETES |
+| `COMPLETION_RATE` | COMPLETION_RATE |
+| `CONVERSION_RATE` | CONVERSION_RATE |
+| `CTR` | CTR |
+| `E_CPM` | E_CPM |
+| `E_CPCL` | E_CPCL |
+| `FIRST_QUARTILES` | FIRST_QUARTILES |
+| `FREQUENCY` | FREQUENCY |
+| `IMPRESSIONS` | IMPRESSIONS |
+| `INTENT_RATE` | INTENT_RATE |
+| `LISTENERS` | LISTENERS |
+| `MIDPOINTS` | MIDPOINTS |
+| `NEW_LISTENERS` | NEW_LISTENERS |
+| `NEW_LISTENER_CONVERSION_RATE` | NEW_LISTENER_CONVERSION_RATE |
+| `NEW_LISTENER_STREAMS` | NEW_LISTENER_STREAMS |
+| `OFF_SPOTIFY_IMPRESSIONS` | OFF_SPOTIFY_IMPRESSIONS |
+| `PAID_LISTENS` | PAID_LISTENS |
+| `PAID_LISTENS_FREQUENCY` | PAID_LISTENS_FREQUENCY |
+| `PAID_LISTENS_REACH` | PAID_LISTENS_REACH |
+| `REACH` | REACH |
+| `SKIPS` | SKIPS |
+| `SPEND` | SPEND |
+| `STARTS` | STARTS |
+| `STREAMS` | STREAMS |
+| `STREAMS_PER_NEW_LISTENER` | STREAMS_PER_NEW_LISTENER |
+| `STREAMS_PER_USER` | STREAMS_PER_USER |
+| `THIRD_QUARTILES` | THIRD_QUARTILES |
+| `VIDEO_VIEWS` | VIDEO_VIEWS |
+| `VIDEO_EXPANDS` | VIDEO_EXPANDS |
+| `VIDEO_EXPAND_RATE` | VIDEO_EXPAND_RATE |
+| `UNMUTES` | UNMUTES | \ No newline at end of file
diff --git a/docs/models/filebasedstreamconfig.md b/docs/models/filebasedstreamconfig.md
index dbe35d48..298b5d5b 100644
--- a/docs/models/filebasedstreamconfig.md
+++ b/docs/models/filebasedstreamconfig.md
@@ -10,5 +10,6 @@
| `days_to_sync_if_history_is_full` | *Optional[int]* | :heavy_minus_sign: | When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. |
| `globs` | List[*str*] | :heavy_minus_sign: | The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here. |
| `input_schema` | *Optional[str]* | :heavy_minus_sign: | The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files. |
+| `recent_n_files_to_read_for_schema_discovery` | *Optional[int]* | :heavy_minus_sign: | The number of recent files which will be used to discover the schema for this stream. |
| `schemaless` | *Optional[bool]* | :heavy_minus_sign: | When enabled, syncs will not validate or structure records against the stream's schema. |
| `validation_policy` | [Optional[models.ValidationPolicy]](../models/validationpolicy.md) | :heavy_minus_sign: | The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. | \ No newline at end of file
diff --git a/docs/models/fileformat.md b/docs/models/fileformat.md
index e80aad59..6ba7f6b2 100644
--- a/docs/models/fileformat.md
+++ b/docs/models/fileformat.md
@@ -1,13 +1,18 @@
 # FileFormat

-File format of Iceberg storage.
+The format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs).
-## Fields
+## Values

-| Field | Type | Required | Description |
-| ----------- | ----------- | ----------- | ----------- |
-| `auto_compact` | *Optional[bool]* | :heavy_minus_sign: | Auto compact data files when stream close |
-| `compact_target_file_size_in_mb` | *Optional[int]* | :heavy_minus_sign: | Specify the target size of Iceberg data file when performing a compaction action. |
-| `flush_batch_size` | *Optional[int]* | :heavy_minus_sign: | Iceberg data file flush batch size. Incoming rows write to cache firstly; When cache size reaches this 'batch size', flush into real Iceberg data file. |
-| `format` | [Optional[models.FileStorageFormat]](../models/filestorageformat.md) | :heavy_minus_sign: | N/A | \ No newline at end of file
+| Name | Value |
+| -------------- | -------------- |
+| `CSV` | csv |
+| `JSON` | json |
+| `JSONL` | jsonl |
+| `EXCEL` | excel |
+| `EXCEL_BINARY` | excel_binary |
+| `FWF` | fwf |
+| `FEATHER` | feather |
+| `PARQUET` | parquet |
+| `YAML` | yaml | \ No newline at end of file
diff --git a/docs/models/filestorageformat.md b/docs/models/filestorageformat.md
deleted file mode 100644
index 536b5dca..00000000
--- a/docs/models/filestorageformat.md
+++ /dev/null
@@ -1,9 +0,0 @@
-# FileStorageFormat
-
-
-## Values
-
-| Name | Value |
-| --------- | --------- |
-| `PARQUET` | Parquet |
-| `AVRO` | Avro | \ No newline at end of file
diff --git a/docs/models/financialeventsstepsizeindays.md b/docs/models/financialeventsstepsizeindays.md
new file mode 100644
index 00000000..d72020ce
--- /dev/null
+++ b/docs/models/financialeventsstepsizeindays.md
@@ -0,0 +1,21 @@
+# FinancialEventsStepSizeInDays
+
+The time window size (in days) for fetching financial events data in chunks. Options are 1, 7, 14, 30, 60, 90, and 180 days, based on API limitations.
+
+- **Smaller step sizes (e.g., 1 day)** are better for large data volumes. They fetch smaller chunks per request, reducing the risk of timeouts or overwhelming the API, though more requests may slow syncing and increase the chance of hitting rate limits.
+- **Larger step sizes (e.g., 14 days)** are better for smaller data volumes. They fetch more data per request, speeding up syncing and reducing the number of API calls, which minimizes strain on rate limits.
+
+Select a step size that matches your data volume to optimize syncing speed and API performance.
+
+
+## Values
+
+| Name | Value |
+| ------------------------ | ------------------------ |
+| `ONE` | 1 |
+| `SEVEN` | 7 |
+| `FOURTEEN` | 14 |
+| `THIRTY` | 30 |
+| `SIXTY` | 60 |
+| `NINETY` | 90 |
+| `ONE_HUNDRED_AND_EIGHTY` | 180 | \ No newline at end of file
diff --git a/docs/models/flattening.md b/docs/models/flattening.md
index d0897bee..e8196cb2 100644
--- a/docs/models/flattening.md
+++ b/docs/models/flattening.md
@@ -1,7 +1,5 @@
 # Flattening

-Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
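A small sketch of selecting one of the `FinancialEventsStepSizeInDays` values above, following the guidance on matching step size to data volume; member names come straight from the Values table:

```python
from airbyte_api import models

# Smaller windows suit large data volumes; larger windows suit small ones.
step = models.FinancialEventsStepSizeInDays.SEVEN  # 7-day chunks
backfill_step = models.FinancialEventsStepSizeInDays.ONE_HUNDRED_AND_EIGHTY  # 180-day chunks
```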
-

## Values

diff --git a/docs/models/format.md b/docs/models/format.md
index 6259259d..9f11e596 100644
--- a/docs/models/format.md
+++ b/docs/models/format.md
@@ -29,9 +29,9 @@ value: models.JsonlFormat = /* values here */
 value: models.ParquetFormat = /* values here */
 ```
-### `models.DocumentFileTypeFormatExperimental`
+### `models.UnstructuredDocumentFormat`
 ```python
-value: models.DocumentFileTypeFormatExperimental = /* values here */
+value: models.UnstructuredDocumentFormat = /* values here */
 ```
diff --git a/docs/models/fullstory.md b/docs/models/fullstory.md
new file mode 100644
index 00000000..d7d25d36
--- /dev/null
+++ b/docs/models/fullstory.md
@@ -0,0 +1,8 @@
+# Fullstory
+
+
+## Values
+
+| Name | Value |
+| ----------- | ----------- |
+| `FULLSTORY` | fullstory | \ No newline at end of file
diff --git a/docs/models/gcsstaging.md b/docs/models/gcsstaging.md
index 716bc359..756d4761 100644
--- a/docs/models/gcsstaging.md
+++ b/docs/models/gcsstaging.md
@@ -10,5 +10,6 @@ Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file.
| `credential` | [models.Credential](../models/credential.md) | :heavy_check_mark: | An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. | |
| `gcs_bucket_name` | *str* | :heavy_check_mark: | The name of the GCS bucket. Read more here. | airbyte_sync |
| `gcs_bucket_path` | *str* | :heavy_check_mark: | Directory under the GCS bucket where data will be written. | data_sync/test |
-| `keep_files_in_gcs_bucket` | [Optional[models.GCSTmpFilesAfterwardProcessing]](../models/gcstmpfilesafterwardprocessing.md) | :heavy_minus_sign: | This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly. | |
-| `method` | [models.DestinationBigqueryMethod](../models/destinationbigquerymethod.md) | :heavy_check_mark: | N/A | | \ No newline at end of file
+| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | |
+| `keep_files_in_gcs_bucket` | [Optional[models.GCSTmpFilesPostProcessing]](../models/gcstmpfilespostprocessing.md) | :heavy_minus_sign: | This upload method temporarily stores records in a GCS bucket. Use this option to choose whether those records should be removed from GCS once the migration has finished. The default, "Delete all tmp files from GCS", is used if not set explicitly. | |
+| `method` | [Optional[models.DestinationBigqueryMethod]](../models/destinationbigquerymethod.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file
diff --git a/docs/models/gcstmpfilesafterwardprocessing.md b/docs/models/gcstmpfilespostprocessing.md
similarity index 94%
rename from docs/models/gcstmpfilesafterwardprocessing.md
rename to docs/models/gcstmpfilespostprocessing.md
index f9eba8d8..4070d7d3 100644
--- a/docs/models/gcstmpfilesafterwardprocessing.md
+++ b/docs/models/gcstmpfilespostprocessing.md
@@ -1,4 +1,4 @@
-# GCSTmpFilesAfterwardProcessing
+# GCSTmpFilesPostProcessing

This upload method temporarily stores records in a GCS bucket. Use this option to choose whether those records should be removed from GCS once the migration has finished. The default, "Delete all tmp files from GCS", is used if not set explicitly.
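A sketch of the updated GCS staging block using the fields above; `models.GCSStaging` is an assumed class name inferred from the doc path, and the `credential` placeholder stands in for the HMAC key union documented in `credential.md`:

```python
from airbyte_api import models

# `GCSStaging` is an assumption (from gcsstaging.md); check the generated
# models for the exact exported name.
staging = models.GCSStaging(
    credential=...,                    # placeholder: an HMAC key credential
    gcs_bucket_name="airbyte_sync",    # example value from the table above
    gcs_bucket_path="data_sync/test",  # example value from the table above
)
```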
diff --git a/docs/models/geographyenum.md b/docs/models/geographyenum.md deleted file mode 100644 index 169a34e0..00000000 --- a/docs/models/geographyenum.md +++ /dev/null @@ -1,10 +0,0 @@ -# GeographyEnum - - -## Values - -| Name | Value | -| ------ | ------ | -| `AUTO` | auto | -| `US` | us | -| `EU` | eu | \ No newline at end of file diff --git a/docs/models/geographyenumnodefault.md b/docs/models/geographyenumnodefault.md deleted file mode 100644 index bd31096f..00000000 --- a/docs/models/geographyenumnodefault.md +++ /dev/null @@ -1,10 +0,0 @@ -# GeographyEnumNoDefault - - -## Values - -| Name | Value | -| ------ | ------ | -| `AUTO` | auto | -| `US` | us | -| `EU` | eu | \ No newline at end of file diff --git a/docs/models/orbit.md b/docs/models/giphy.md similarity index 66% rename from docs/models/orbit.md rename to docs/models/giphy.md index ae74911e..27ec99fe 100644 --- a/docs/models/orbit.md +++ b/docs/models/giphy.md @@ -1,8 +1,8 @@ -# Orbit +# Giphy ## Values | Name | Value | | ------- | ------- | -| `ORBIT` | orbit | \ No newline at end of file +| `GIPHY` | giphy | \ No newline at end of file diff --git a/docs/models/gluecatalog.md b/docs/models/gluecatalog.md index 3e3fa5da..bf50d4de 100644 --- a/docs/models/gluecatalog.md +++ b/docs/models/gluecatalog.md @@ -1,11 +1,14 @@ # GlueCatalog -The GlueCatalog connects to a AWS Glue Catalog +Configuration details for connecting to an AWS Glue-based Iceberg catalog. ## Fields -| Field | Type | Required | Description | Example | -| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `catalog_type` | [Optional[models.DestinationIcebergSchemasCatalogConfigIcebergCatalogConfigCatalogType]](../models/destinationicebergschemascatalogconfigicebergcatalogconfigcatalogtype.md) | :heavy_minus_sign: | N/A | | -| `database` | *Optional[str]* | :heavy_minus_sign: | The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public". 
| public | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `database_name` | *str* | :heavy_check_mark: | The Glue database name. This will ONLY be used if the `Destination Namespace` setting for the connection is set to `Destination-defined` or `Source-defined` | +| `glue_id` | *str* | :heavy_check_mark: | The AWS Account ID associated with the Glue service used by the Iceberg catalog. | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `catalog_type` | [Optional[models.DestinationS3DataLakeCatalogType]](../models/destinations3datalakecatalogtype.md) | :heavy_minus_sign: | N/A | +| `role_arn` | *Optional[str]* | :heavy_minus_sign: | The ARN of the AWS role to assume. Only usable in Airbyte Cloud. | \ No newline at end of file diff --git a/docs/models/iceberg.md b/docs/models/gologin.md similarity index 65% rename from docs/models/iceberg.md rename to docs/models/gologin.md index 34ec189e..4e6006a7 100644 --- a/docs/models/iceberg.md +++ b/docs/models/gologin.md @@ -1,8 +1,8 @@ -# Iceberg +# Gologin ## Values | Name | Value | | --------- | --------- | -| `ICEBERG` | iceberg | \ No newline at end of file +| `GOLOGIN` | gologin | \ No newline at end of file diff --git a/docs/models/greythr.md b/docs/models/greythr.md new file mode 100644 index 00000000..5d035c10 --- /dev/null +++ b/docs/models/greythr.md @@ -0,0 +1,8 @@ +# Greythr + + +## Values + +| Name | Value | +| --------- | --------- | +| `GREYTHR` | greythr | \ No newline at end of file diff --git a/docs/models/harness.md b/docs/models/harness.md new file mode 100644 index 00000000..3275bc2d --- /dev/null +++ b/docs/models/harness.md @@ -0,0 +1,8 @@ +# Harness + + +## Values + +| Name | Value | +| --------- | --------- | +| `HARNESS` | harness | \ No newline at end of file diff --git a/docs/models/hashing.md b/docs/models/hashing.md new file mode 100644 index 00000000..52130323 --- /dev/null +++ b/docs/models/hashing.md @@ -0,0 +1,10 @@ +# Hashing + + +## Fields + +| Field | Type | Required | Description | +| ----------------------------------------------------- | ----------------------------------------------------- | ----------------------------------------------------- | ----------------------------------------------------- | +| `field_name_suffix` | *str* | :heavy_check_mark: | The suffix to append to the field name after hashing. | +| `method` | [models.HashingMethod](../models/hashingmethod.md) | :heavy_check_mark: | The hashing algorithm to use. | +| `target_field` | *str* | :heavy_check_mark: | The name of the field to be hashed. | \ No newline at end of file diff --git a/docs/models/hashingmethod.md b/docs/models/hashingmethod.md new file mode 100644 index 00000000..624655b5 --- /dev/null +++ b/docs/models/hashingmethod.md @@ -0,0 +1,16 @@ +# HashingMethod + +The hashing algorithm to use. 
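+
+As a hedged sketch, pairing this enum with the `Hashing` mapper shown above (the field and member names come from the tables in this diff; the construction itself is unverified):
+
+```python
+from airbyte_api import models
+
+# Hash the `email` column with SHA-256 and emit it as `email_hashed`.
+# Names are taken from the Hashing/HashingMethod tables in this diff.
+hashing = models.Hashing(
+    target_field="email",
+    method=models.HashingMethod.SHA_256,
+    field_name_suffix="_hashed",
+)
+```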
+ + +## Values + +| Name | Value | +| --------- | --------- | +| `MD2` | MD2 | +| `MD5` | MD5 | +| `SHA_1` | SHA-1 | +| `SHA_224` | SHA-224 | +| `SHA_256` | SHA-256 | +| `SHA_384` | SHA-384 | +| `SHA_512` | SHA-512 | \ No newline at end of file diff --git a/docs/models/hellobaton.md b/docs/models/hellobaton.md new file mode 100644 index 00000000..b5df54c2 --- /dev/null +++ b/docs/models/hellobaton.md @@ -0,0 +1,8 @@ +# Hellobaton + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `HELLOBATON` | hellobaton | \ No newline at end of file diff --git a/docs/models/helpscout.md b/docs/models/helpscout.md new file mode 100644 index 00000000..54459c5b --- /dev/null +++ b/docs/models/helpscout.md @@ -0,0 +1,8 @@ +# HelpScout + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `HELP_SCOUT` | help-scout | \ No newline at end of file diff --git a/docs/models/hoorayhr.md b/docs/models/hoorayhr.md new file mode 100644 index 00000000..1447b920 --- /dev/null +++ b/docs/models/hoorayhr.md @@ -0,0 +1,8 @@ +# Hoorayhr + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `HOORAYHR` | hoorayhr | \ No newline at end of file diff --git a/docs/models/huggingfacedatasets.md b/docs/models/huggingfacedatasets.md new file mode 100644 index 00000000..d3a4abba --- /dev/null +++ b/docs/models/huggingfacedatasets.md @@ -0,0 +1,8 @@ +# HuggingFaceDatasets + + +## Values + +| Name | Value | +| ----------------------- | ----------------------- | +| `HUGGING_FACE_DATASETS` | hugging-face-datasets | \ No newline at end of file diff --git a/docs/models/destinations3glueformattype.md b/docs/models/huntr.md similarity index 52% rename from docs/models/destinations3glueformattype.md rename to docs/models/huntr.md index f1aa2217..85bb5ad6 100644 --- a/docs/models/destinations3glueformattype.md +++ b/docs/models/huntr.md @@ -1,8 +1,8 @@ -# DestinationS3GlueFormatType +# Huntr ## Values | Name | Value | | ------- | ------- | -| `JSONL` | JSONL | \ No newline at end of file +| `HUNTR` | huntr | \ No newline at end of file diff --git a/docs/models/icebergcatalogconfig.md b/docs/models/icebergcatalogconfig.md deleted file mode 100644 index 6e1c4af3..00000000 --- a/docs/models/icebergcatalogconfig.md +++ /dev/null @@ -1,37 +0,0 @@ -# IcebergCatalogConfig - -Catalog config of Iceberg. - - -## Supported Types - -### `models.HiveCatalogUseApacheHiveMetaStore` - -```python -value: models.HiveCatalogUseApacheHiveMetaStore = /* values here */ -``` - -### `models.HadoopCatalogUseHierarchicalFileSystemsAsSameAsStorageConfig` - -```python -value: models.HadoopCatalogUseHierarchicalFileSystemsAsSameAsStorageConfig = /* values here */ -``` - -### `models.JdbcCatalogUseRelationalDatabase` - -```python -value: models.JdbcCatalogUseRelationalDatabase = /* values here */ -``` - -### `models.RESTCatalog` - -```python -value: models.RESTCatalog = /* values here */ -``` - -### `models.GlueCatalog` - -```python -value: models.GlueCatalog = /* values here */ -``` - diff --git a/docs/models/imagga.md b/docs/models/imagga.md new file mode 100644 index 00000000..a9562bea --- /dev/null +++ b/docs/models/imagga.md @@ -0,0 +1,8 @@ +# Imagga + + +## Values + +| Name | Value | +| -------- | -------- | +| `IMAGGA` | imagga | \ No newline at end of file diff --git a/docs/models/insertload.md b/docs/models/insertload.md new file mode 100644 index 00000000..6e904f2e --- /dev/null +++ b/docs/models/insertload.md @@ -0,0 +1,11 @@ +# InsertLoad + +Configuration details for using the INSERT loading mechanism. 
+ + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `load_type` | [Optional[models.DestinationMssqlSchemasLoadType]](../models/destinationmssqlschemasloadtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/insightconfig.md b/docs/models/insightconfig.md index 66986c86..97a8f1fb 100644 --- a/docs/models/insightconfig.md +++ b/docs/models/insightconfig.md @@ -9,7 +9,6 @@ Config for custom insights | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `name` | *str* | :heavy_check_mark: | The name value 
of insight | | | `action_breakdowns` | List[[models.ValidActionBreakdowns](../models/validactionbreakdowns.md)] | :heavy_minus_sign: | A list of chosen action_breakdowns for action_breakdowns | | -| `action_report_time` | [Optional[models.SourceFacebookMarketingActionReportTime]](../models/sourcefacebookmarketingactionreporttime.md) | :heavy_minus_sign: | Determines the report time of action stats. For example, if a person saw the ad on Jan 1st but converted on Jan 2nd, when you query the API with action_report_time=impression, you see a conversion on Jan 1st. When you query the API with action_report_time=conversion, you see a conversion on Jan 2nd. | | | `breakdowns` | List[[models.ValidBreakdowns](../models/validbreakdowns.md)] | :heavy_minus_sign: | A list of chosen breakdowns for breakdowns | | | `end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date until which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data. | 2017-01-26T00:00:00Z | | `fields` | List[[models.SourceFacebookMarketingValidEnums](../models/sourcefacebookmarketingvalidenums.md)] | :heavy_minus_sign: | A list of chosen fields for fields parameter | | diff --git a/docs/models/insightful.md b/docs/models/insightful.md new file mode 100644 index 00000000..14e4d6f0 --- /dev/null +++ b/docs/models/insightful.md @@ -0,0 +1,8 @@ +# Insightful + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `INSIGHTFUL` | insightful | \ No newline at end of file diff --git a/docs/models/intercom.md b/docs/models/intercom.md index 32623139..2e1e6d6e 100644 --- a/docs/models/intercom.md +++ b/docs/models/intercom.md @@ -1,9 +1,8 @@ # Intercom -## Fields +## Values -| Field | Type | Required | Description | -| -------------------------------------------- | -------------------------------------------- | -------------------------------------------- | -------------------------------------------- | -| `client_id` | *Optional[str]* | :heavy_minus_sign: | Client Id for your Intercom application. | -| `client_secret` | *Optional[str]* | :heavy_minus_sign: | Client Secret for your Intercom application. | \ No newline at end of file +| Name | Value | +| ---------- | ---------- | +| `INTERCOM` | intercom | \ No newline at end of file diff --git a/docs/models/interval.md b/docs/models/interval.md index 295b387e..fd19aa39 100644 --- a/docs/models/interval.md +++ b/docs/models/interval.md @@ -1,20 +1,15 @@ # Interval -Between two consecutive points in time series Supports: 1min, 5min, 15min, 30min, 45min, 1h, 2h, 4h, 1day, 1week, 1month +Time-series data point interval. Required for intraday endpoints. 
+ ## Values -| Name | Value | -| --------------- | --------------- | -| `ONEMIN` | 1min | -| `FIVEMIN` | 5min | -| `FIFTEENMIN` | 15min | -| `THIRTYMIN` | 30min | -| `FORTY_FIVEMIN` | 45min | -| `ONEH` | 1h | -| `TWOH` | 2h | -| `FOURH` | 4h | -| `ONEDAY` | 1day | -| `ONEWEEK` | 1week | -| `ONEMONTH` | 1month | \ No newline at end of file +| Name | Value | +| ------------ | ------------ | +| `ONEMIN` | 1min | +| `FIVEMIN` | 5min | +| `FIFTEENMIN` | 15min | +| `THIRTYMIN` | 30min | +| `SIXTYMIN` | 60min | \ No newline at end of file diff --git a/docs/models/intruder.md b/docs/models/intruder.md new file mode 100644 index 00000000..d66109f3 --- /dev/null +++ b/docs/models/intruder.md @@ -0,0 +1,8 @@ +# Intruder + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `INTRUDER` | intruder | \ No newline at end of file diff --git a/docs/models/jamfpro.md b/docs/models/jamfpro.md new file mode 100644 index 00000000..b337fc45 --- /dev/null +++ b/docs/models/jamfpro.md @@ -0,0 +1,8 @@ +# JamfPro + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `JAMF_PRO` | jamf-pro | \ No newline at end of file diff --git a/docs/models/jdbccataloguserelationaldatabase.md b/docs/models/jdbccataloguserelationaldatabase.md deleted file mode 100644 index 2e90a477..00000000 --- a/docs/models/jdbccataloguserelationaldatabase.md +++ /dev/null @@ -1,16 +0,0 @@ -# JdbcCatalogUseRelationalDatabase - -Using a table in a relational database to manage Iceberg tables through JDBC. Read more here. Supporting: PostgreSQL - - -## Fields - -| Field | Type | Required | Description | Example | -| -------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------- | -| `catalog_schema` | *Optional[str]* | :heavy_minus_sign: | Iceberg catalog metadata tables are written to catalog schema. The usual value for this field is "public". | public | -| `catalog_type` | [Optional[models.DestinationIcebergSchemasCatalogType]](../models/destinationicebergschemascatalogtype.md) | :heavy_minus_sign: | N/A | | -| `database` | *Optional[str]* | :heavy_minus_sign: | The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public". | public | -| `jdbc_url` | *Optional[str]* | :heavy_minus_sign: | N/A | jdbc:postgresql://{host}:{port}/{database} | -| `password` | *Optional[str]* | :heavy_minus_sign: | Password associated with the username. | | -| `ssl` | *Optional[bool]* | :heavy_minus_sign: | Encrypt data using SSL. When activating SSL, please select one of the connection modes. | | -| `username` | *Optional[str]* | :heavy_minus_sign: | Username to use to access the database. | | \ No newline at end of file diff --git a/docs/models/jobtype.md b/docs/models/jobtype.md new file mode 100644 index 00000000..98ce208c --- /dev/null +++ b/docs/models/jobtype.md @@ -0,0 +1,16 @@ +# JobType + +enum that describes the different types of jobs that the platform runs. 
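+
+For instance, this enum can be paired with the `JobTypeResourceLimit` model documented below; a sketch (the `ResourceRequirements` field names are assumptions, as that model's table is not part of this hunk):
+
+```python
+from airbyte_api import models
+
+# Cap resources for sync jobs only; other job types keep their defaults.
+limit = models.JobTypeResourceLimit(
+    job_type=models.JobType.SYNC,
+    resource_requirements=models.ResourceRequirements(
+        cpu_limit="1",       # assumed field names on ResourceRequirements
+        memory_limit="2Gi",
+    ),
+)
+```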
+ + +## Values + +| Name | Value | +| -------------------- | -------------------- | +| `GET_SPEC` | get_spec | +| `CHECK_CONNECTION` | check_connection | +| `DISCOVER_SCHEMA` | discover_schema | +| `SYNC` | sync | +| `RESET_CONNECTION` | reset_connection | +| `CONNECTION_UPDATER` | connection_updater | +| `REPLICATE` | replicate | \ No newline at end of file diff --git a/docs/models/jobtyperesourcelimit.md b/docs/models/jobtyperesourcelimit.md new file mode 100644 index 00000000..163c235e --- /dev/null +++ b/docs/models/jobtyperesourcelimit.md @@ -0,0 +1,11 @@ +# JobTypeResourceLimit + +sets resource requirements for a specific job type for an actor or actor definition. these values override the default, if both are set. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | +| `job_type` | [models.JobType](../models/jobtype.md) | :heavy_check_mark: | enum that describes the different types of jobs that the platform runs. | +| `resource_requirements` | [models.ResourceRequirements](../models/resourcerequirements.md) | :heavy_check_mark: | optional resource requirements to run workers (blank for unbounded allocations) | \ No newline at end of file diff --git a/docs/models/judgemereviews.md b/docs/models/judgemereviews.md new file mode 100644 index 00000000..61e0547b --- /dev/null +++ b/docs/models/judgemereviews.md @@ -0,0 +1,8 @@ +# JudgeMeReviews + + +## Values + +| Name | Value | +| ------------------ | ------------------ | +| `JUDGE_ME_REVIEWS` | judge-me-reviews | \ No newline at end of file diff --git a/docs/models/keka.md b/docs/models/keka.md new file mode 100644 index 00000000..08fe091f --- /dev/null +++ b/docs/models/keka.md @@ -0,0 +1,8 @@ +# Keka + + +## Values + +| Name | Value | +| ------ | ------ | +| `KEKA` | keka | \ No newline at end of file diff --git a/docs/models/ldap.md b/docs/models/ldap.md new file mode 100644 index 00000000..27684da4 --- /dev/null +++ b/docs/models/ldap.md @@ -0,0 +1,10 @@ +# Ldap + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | +| `password` | *str* | :heavy_check_mark: | Enter the password associated with the username. | +| `username` | *str* | :heavy_check_mark: | Username to use to access the database. 
| +| `auth_type` | [Optional[models.DestinationTeradataSchemasAuthType]](../models/destinationteradataschemasauthtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/linear.md b/docs/models/linear.md new file mode 100644 index 00000000..29787cba --- /dev/null +++ b/docs/models/linear.md @@ -0,0 +1,8 @@ +# Linear + + +## Values + +| Name | Value | +| -------- | -------- | +| `LINEAR` | linear | \ No newline at end of file diff --git a/docs/models/loadtype.md b/docs/models/loadtype.md new file mode 100644 index 00000000..d01ed7bc --- /dev/null +++ b/docs/models/loadtype.md @@ -0,0 +1,19 @@ +# LoadType + +Specifies the type of load mechanism (e.g., BULK, INSERT) and its associated configuration. + + +## Supported Types + +### `models.InsertLoad` + +```python +value: models.InsertLoad = /* values here */ +``` + +### `models.BulkLoad` + +```python +value: models.BulkLoad = /* values here */ +``` + diff --git a/docs/models/mailersend.md b/docs/models/mailersend.md new file mode 100644 index 00000000..fcaec8ed --- /dev/null +++ b/docs/models/mailersend.md @@ -0,0 +1,8 @@ +# Mailersend + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `MAILERSEND` | mailersend | \ No newline at end of file diff --git a/docs/models/mapperconfiguration.md b/docs/models/mapperconfiguration.md index 1563d94f..90334a35 100644 --- a/docs/models/mapperconfiguration.md +++ b/docs/models/mapperconfiguration.md @@ -3,7 +3,29 @@ The values required to configure the mapper. -## Fields +## Supported Types + +### `models.Hashing` + +```python +value: models.Hashing = /* values here */ +``` + +### `models.FieldRenaming` + +```python +value: models.FieldRenaming = /* values here */ +``` + +### `models.RowFiltering` + +```python +value: models.RowFiltering = /* values here */ +``` + +### `models.Encryption` + +```python +value: models.Encryption = /* values here */ +``` -| Field | Type | Required | Description | -| ----------- | ----------- | ----------- | ----------- | \ No newline at end of file diff --git a/docs/models/mendeley.md b/docs/models/mendeley.md new file mode 100644 index 00000000..cd560290 --- /dev/null +++ b/docs/models/mendeley.md @@ -0,0 +1,8 @@ +# Mendeley + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `MENDELEY` | mendeley | \ No newline at end of file diff --git a/docs/models/mercadoads.md b/docs/models/mercadoads.md new file mode 100644 index 00000000..53ffebee --- /dev/null +++ b/docs/models/mercadoads.md @@ -0,0 +1,8 @@ +# MercadoAds + + +## Values + +| Name | Value | +| ------------- | ------------- | +| `MERCADO_ADS` | mercado-ads | \ No newline at end of file diff --git a/docs/models/merge.md b/docs/models/merge.md new file mode 100644 index 00000000..cb0a15b8 --- /dev/null +++ b/docs/models/merge.md @@ -0,0 +1,8 @@ +# Merge + + +## Values + +| Name | Value | +| ------- | ------- | +| `MERGE` | merge | \ No newline at end of file diff --git a/docs/models/mode.md b/docs/models/mode.md index 3f9f5116..b1b22629 100644 --- a/docs/models/mode.md +++ b/docs/models/mode.md @@ -3,6 +3,11 @@ ## Values -| Name | Value | -| ------ | ------ | -| `MODE` | mode | \ No newline at end of file +| Name | Value | +| ----- | ----- | +| `CBC` | CBC | +| `CFB` | CFB | +| `OFB` | OFB | +| `CTR` | CTR | +| `GCM` | GCM | +| `ECB` | ECB | \ No newline at end of file diff --git a/docs/models/mongodbatlasreplicaset.md b/docs/models/mongodbatlasreplicaset.md index c0a3fabf..2400509e 100644 --- a/docs/models/mongodbatlasreplicaset.md +++ 
b/docs/models/mongodbatlasreplicaset.md @@ -8,10 +8,10 @@ MongoDB Atlas-hosted cluster configured as a replica set | Field | Type | Required | Description | Example | | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `connection_string` | *str* | :heavy_check_mark: | The connection string of the cluster that you want to replicate. | mongodb+srv://cluster0.abcd1.mongodb.net/ | -| `database` | *str* | :heavy_check_mark: | The name of the MongoDB database that contains the collection(s) to replicate. | | +| `databases` | List[*str*] | :heavy_check_mark: | The names of the MongoDB databases that contain the collection(s) to replicate. | | | `password` | *str* | :heavy_check_mark: | The password associated with this username. | | | `username` | *str* | :heavy_check_mark: | The username which is used to access the database. | | | `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | | `auth_source` | *Optional[str]* | :heavy_minus_sign: | The authentication source where the user information is stored. See https://www.mongodb.com/docs/manual/reference/connection-string/#mongodb-urioption-urioption.authSource for more details. | admin | -| `cluster_type` | [models.SourceMongodbV2ClusterType](../models/sourcemongodbv2clustertype.md) | :heavy_check_mark: | N/A | | +| `cluster_type` | [models.SourceMongodbV2SchemasClusterType](../models/sourcemongodbv2schemasclustertype.md) | :heavy_check_mark: | N/A | | | `schema_enforced` | *Optional[bool]* | :heavy_minus_sign: | When enabled, syncs will validate and structure records against the stream's schema. 
| | \ No newline at end of file diff --git a/docs/models/mssqlv2.md b/docs/models/mssqlv2.md new file mode 100644 index 00000000..c3ee20a5 --- /dev/null +++ b/docs/models/mssqlv2.md @@ -0,0 +1,8 @@ +# MssqlV2 + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `MSSQL_V2` | mssql-v2 | \ No newline at end of file diff --git a/docs/models/name.md b/docs/models/name.md new file mode 100644 index 00000000..8f85b750 --- /dev/null +++ b/docs/models/name.md @@ -0,0 +1,8 @@ +# Name + + +## Values + +| Name | Value | +| ------------- | ------------- | +| `UNENCRYPTED` | unencrypted | \ No newline at end of file diff --git a/docs/models/navan.md b/docs/models/navan.md new file mode 100644 index 00000000..34fd3a88 --- /dev/null +++ b/docs/models/navan.md @@ -0,0 +1,8 @@ +# Navan + + +## Values + +| Name | Value | +| ------- | ------- | +| `NAVAN` | navan | \ No newline at end of file diff --git a/docs/models/nebiusai.md b/docs/models/nebiusai.md new file mode 100644 index 00000000..ee81ebec --- /dev/null +++ b/docs/models/nebiusai.md @@ -0,0 +1,8 @@ +# NebiusAi + + +## Values + +| Name | Value | +| ----------- | ----------- | +| `NEBIUS_AI` | nebius-ai | \ No newline at end of file diff --git a/docs/models/nessiecatalog.md b/docs/models/nessiecatalog.md new file mode 100644 index 00000000..2dabc860 --- /dev/null +++ b/docs/models/nessiecatalog.md @@ -0,0 +1,14 @@ +# NessieCatalog + +Configuration details for connecting to a Nessie-based Iceberg catalog. + + +## Fields + +| Field | Type | Required | Description | Example | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `namespace` | *str* | :heavy_check_mark: | The Nessie namespace to be used in the Table identifier.
This will ONLY be used if the `Destination Namespace` setting for the connection is set to
`Destination-defined` or `Source-defined` | | +| `server_uri` | *str* | :heavy_check_mark: | The base URL of the Nessie server used to connect to the Nessie catalog. | | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | +| `access_token` | *Optional[str]* | :heavy_minus_sign: | Optional token for authentication with the Nessie server. | a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY | +| `catalog_type` | [Optional[models.DestinationS3DataLakeSchemasCatalogTypeCatalogType]](../models/destinations3datalakeschemascatalogtypecatalogtype.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/netsuiteenterprise.md b/docs/models/netsuiteenterprise.md new file mode 100644 index 00000000..af3a35db --- /dev/null +++ b/docs/models/netsuiteenterprise.md @@ -0,0 +1,8 @@ +# NetsuiteEnterprise + + +## Values + +| Name | Value | +| --------------------- | --------------------- | +| `NETSUITE_ENTERPRISE` | netsuite-enterprise | \ No newline at end of file diff --git a/docs/models/newsdata.md b/docs/models/newsdata.md new file mode 100644 index 00000000..94bf386a --- /dev/null +++ b/docs/models/newsdata.md @@ -0,0 +1,8 @@ +# Newsdata + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `NEWSDATA` | newsdata | \ No newline at end of file diff --git a/docs/models/nexiopay.md b/docs/models/nexiopay.md new file mode 100644 index 00000000..7a685496 --- /dev/null +++ b/docs/models/nexiopay.md @@ -0,0 +1,8 @@ +# Nexiopay + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `NEXIOPAY` | nexiopay | \ No newline at end of file diff --git a/docs/models/ninjaonermm.md b/docs/models/ninjaonermm.md new file mode 100644 index 00000000..9518ae1f --- /dev/null +++ b/docs/models/ninjaonermm.md @@ -0,0 +1,8 @@ +# NinjaoneRmm + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `NINJAONE_RMM` | ninjaone-rmm | \ No newline at end of file diff --git a/docs/models/nonet.md b/docs/models/nonet.md index 13e73566..498bbb9d 100644 --- a/docs/models/nonet.md +++ b/docs/models/nonet.md @@ -1,10 +1,9 @@ # NoneT -No authentication will be used - ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | -| `method` | [models.DestinationElasticsearchMethod](../models/destinationelasticsearchmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `storage_type` | [Optional[models.StorageType]](../models/storagetype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/not_.md b/docs/models/not_.md new file mode 100644 index 00000000..809c5aa1 --- /dev/null +++ b/docs/models/not_.md @@ -0,0 +1,9 @@ +# Not + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------- | 
-------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | +| `conditions` | List[[models.RowFilteringOperation](../models/rowfilteringoperation.md)] | :heavy_check_mark: | Conditions to evaluate with the NOT operator. | +| `type` | [models.RowFilteringOperationType](../models/rowfilteringoperationtype.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/notificationconfig.md b/docs/models/notificationconfig.md new file mode 100644 index 00000000..ad45ce5f --- /dev/null +++ b/docs/models/notificationconfig.md @@ -0,0 +1,11 @@ +# NotificationConfig + +Configures a notification. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | +| `email` | [Optional[models.EmailNotificationConfig]](../models/emailnotificationconfig.md) | :heavy_minus_sign: | Configures an email notification. | +| `webhook` | [Optional[models.WebhookNotificationConfig]](../models/webhooknotificationconfig.md) | :heavy_minus_sign: | Configures a webhook notification. | \ No newline at end of file diff --git a/docs/models/notificationsconfig.md b/docs/models/notificationsconfig.md new file mode 100644 index 00000000..972e5fc5 --- /dev/null +++ b/docs/models/notificationsconfig.md @@ -0,0 +1,15 @@ +# NotificationsConfig + +Configures workspace notifications. + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | +| `connection_update` | [Optional[models.NotificationConfig]](../models/notificationconfig.md) | :heavy_minus_sign: | Configures a notification. | +| `connection_update_action_required` | [Optional[models.NotificationConfig]](../models/notificationconfig.md) | :heavy_minus_sign: | Configures a notification. | +| `failure` | [Optional[models.NotificationConfig]](../models/notificationconfig.md) | :heavy_minus_sign: | Configures a notification. | +| `success` | [Optional[models.NotificationConfig]](../models/notificationconfig.md) | :heavy_minus_sign: | Configures a notification. | +| `sync_disabled` | [Optional[models.NotificationConfig]](../models/notificationconfig.md) | :heavy_minus_sign: | Configures a notification. | +| `sync_disabled_warning` | [Optional[models.NotificationConfig]](../models/notificationconfig.md) | :heavy_minus_sign: | Configures a notification. 
| \ No newline at end of file diff --git a/docs/models/notunnel.md b/docs/models/notunnel.md index de3dff89..f0e0e40c 100644 --- a/docs/models/notunnel.md +++ b/docs/models/notunnel.md @@ -1,8 +1,11 @@ # NoTunnel +No ssh tunnel needed to connect to database + ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------ | ------------------------------------------------ | ------------------------------------------------ | ------------------------------------------------ | -| `tunnel_method` | [models.TunnelMethod](../models/tunnelmethod.md) | :heavy_check_mark: | No ssh tunnel needed to connect to database | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------- | ---------------------------------------------------------- | ---------------------------------------------------------- | ---------------------------------------------------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `tunnel_method` | [Optional[models.TunnelMethod]](../models/tunnelmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/oauth.md b/docs/models/oauth.md index 9f531510..2f8d26bc 100644 --- a/docs/models/oauth.md +++ b/docs/models/oauth.md @@ -3,9 +3,10 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `access_token` | *str* | :heavy_check_mark: | OAuth access token | -| `client_id` | *Optional[str]* | :heavy_minus_sign: | OAuth Client Id | -| `client_secret` | *Optional[str]* | :heavy_minus_sign: | OAuth Client secret | -| `option_title` | [Optional[models.OptionTitle]](../models/optiontitle.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `client_id` | *str* | :heavy_check_mark: | The Client ID of your HubSpot developer application. See the Hubspot docs if you need help finding this ID. | +| `client_secret` | *str* | :heavy_check_mark: | The client secret for your HubSpot developer application. See the Hubspot docs if you need help finding this secret. | +| `refresh_token` | *str* | :heavy_check_mark: | Refresh token to renew an expired access token. See the Hubspot docs if you need help finding this token. 
 |
+| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A |
+| `type` | [Optional[models.Type]](../models/type.md) | :heavy_minus_sign: | N/A |
\ No newline at end of file
diff --git a/docs/models/oauth2accesstoken.md b/docs/models/oauth2accesstoken.md
index e5d7d5f2..558c88bb 100644
--- a/docs/models/oauth2accesstoken.md
+++ b/docs/models/oauth2accesstoken.md
@@ -6,4 +6,4 @@
 | Field | Type | Required | Description | Example |
 | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------ |
 | `access_token` | *str* | :heavy_check_mark: | Also called API Access Token The access token used to call the Auth0 Management API Token. It's a JWT that contains specific grant permissions knowns as scopes. | |
-| `auth_type` | [models.SourceAuth0SchemasCredentialsAuthenticationMethod](../models/sourceauth0schemascredentialsauthenticationmethod.md) | :heavy_check_mark: | N/A | oauth2_access_token |
\ No newline at end of file
+| `auth_type` | [models.SourceAuth0SchemasAuthenticationMethod](../models/sourceauth0schemasauthenticationmethod.md) | :heavy_check_mark: | N/A | oauth2_access_token |
\ No newline at end of file
diff --git a/docs/models/oauth2authentication.md b/docs/models/oauth2authentication.md
new file mode 100644
index 00000000..03001a88
--- /dev/null
+++ b/docs/models/oauth2authentication.md
@@ -0,0 +1,14 @@
+# OAuth2Authentication
+
+Authenticate using OAuth2. This requires a consumer key, the private part of the certificate with which the NetSuite OAuth2 client credentials were set up, and the certificate ID for the OAuth2 setup entry.
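+
+A minimal construction sketch (values are placeholders; the field names come from the Fields table below, and the optional `authentication_method` discriminator is left at its default):
+
+```python
+from airbyte_api import models
+
+# Sketch only: placeholder values, names taken from this diff's tables.
+auth = models.OAuth2Authentication(
+    client_id="consumer-key-from-integration-record",
+    key_id="certificate-id-from-oauth2-setup-entry",
+    oauth2_private_key="-----BEGIN PRIVATE KEY-----\n...",
+)
+```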
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
+| `client_id` | *str* | :heavy_check_mark: | The consumer key used for OAuth2 authentication. This is generated in NetSuite when creating an integration record. |
+| `key_id` | *str* | :heavy_check_mark: | The certificate ID for the OAuth 2.0 Client Credentials Setup entry. |
+| `oauth2_private_key` | *str* | :heavy_check_mark: | The private portion of the certificate with which OAuth2 was set up (created with openssl req -new -x509 -newkey rsa:4096 -keyout private.pem -sigopt rsa_padding_mode:pss -sha256 -sigopt rsa_pss_saltlen:64 -out public.pem -nodes -days 365). |
+| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A |
+| `authentication_method` | [Optional[models.SourceNetsuiteEnterpriseSchemasAuthenticationMethodAuthenticationMethod]](../models/sourcenetsuiteenterpriseschemasauthenticationmethodauthenticationmethod.md) | :heavy_minus_sign: | N/A |
\ No newline at end of file
diff --git a/docs/models/oauth2confidentialapplication.md b/docs/models/oauth2confidentialapplication.md
index df03da4a..845cefe2 100644
--- a/docs/models/oauth2confidentialapplication.md
+++ b/docs/models/oauth2confidentialapplication.md
@@ -8,4 +8,4 @@
 | `audience` | *str* | :heavy_check_mark: | The audience for the token, which is your API. You can find this in the Identifier field on your API's settings tab | https://dev-yourOrg.us.auth0.com/api/v2/ |
 | `client_id` | *str* | :heavy_check_mark: | Your application's Client ID. You can find this value on the application's settings tab after you login the admin portal. | Client_ID |
 | `client_secret` | *str* | :heavy_check_mark: | Your application's Client Secret. You can find this value on the application's settings tab after you login the admin portal.
| Client_Secret | -| `auth_type` | [models.SourceAuth0SchemasAuthenticationMethod](../models/sourceauth0schemasauthenticationmethod.md) | :heavy_check_mark: | N/A | | \ No newline at end of file +| `auth_type` | [models.SourceAuth0SchemasCredentialsAuthenticationMethod](../models/sourceauth0schemascredentialsauthenticationmethod.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/oauthactornames.md b/docs/models/oauthactornames.md index fe72ce5c..7c0a3912 100644 --- a/docs/models/oauthactornames.md +++ b/docs/models/oauthactornames.md @@ -13,6 +13,7 @@ | `BING_ADS` | bing-ads | | `DRIFT` | drift | | `FACEBOOK_MARKETING` | facebook-marketing | +| `FACEBOOK_PAGES` | facebook-pages | | `GCS` | gcs | | `GITHUB` | github | | `GITLAB` | gitlab | @@ -35,15 +36,14 @@ | `PINTEREST` | pinterest | | `RD_STATION_MARKETING` | rd-station-marketing | | `SALESFORCE` | salesforce | +| `SHAREPOINT_ENTERPRISE` | sharepoint-enterprise | | `SLACK` | slack | | `SMARTSHEETS` | smartsheets | | `SNAPCHAT_MARKETING` | snapchat-marketing | -| `SNOWFLAKE` | snowflake | | `SURVEYMONKEY` | surveymonkey | | `TIKTOK_MARKETING` | tiktok-marketing | | `TRELLO` | trello | | `TYPEFORM` | typeform | | `YOUTUBE_ANALYTICS` | youtube-analytics | -| `ZENDESK_CHAT` | zendesk-chat | | `ZENDESK_SUPPORT` | zendesk-support | | `ZENDESK_TALK` | zendesk-talk | \ No newline at end of file diff --git a/docs/models/oauthcredentialsconfiguration.md b/docs/models/oauthcredentialsconfiguration.md deleted file mode 100644 index 8983c654..00000000 --- a/docs/models/oauthcredentialsconfiguration.md +++ /dev/null @@ -1,265 +0,0 @@ -# OAuthCredentialsConfiguration - -The values required to configure the source. - - -## Supported Types - -### `models.Airtable` - -```python -value: models.Airtable = /* values here */ -``` - -### `models.AmazonAds` - -```python -value: models.AmazonAds = /* values here */ -``` - -### `models.AmazonSellerPartner` - -```python -value: models.AmazonSellerPartner = /* values here */ -``` - -### `models.Asana` - -```python -value: models.Asana = /* values here */ -``` - -### `models.AzureBlobStorage` - -```python -value: models.AzureBlobStorage = /* values here */ -``` - -### `models.BingAds` - -```python -value: models.BingAds = /* values here */ -``` - -### `models.Drift` - -```python -value: models.Drift = /* values here */ -``` - -### `models.FacebookMarketing` - -```python -value: models.FacebookMarketing = /* values here */ -``` - -### `models.Gcs` - -```python -value: models.Gcs = /* values here */ -``` - -### `models.Github` - -```python -value: models.Github = /* values here */ -``` - -### `models.Gitlab` - -```python -value: models.Gitlab = /* values here */ -``` - -### `models.GoogleAds` - -```python -value: models.GoogleAds = /* values here */ -``` - -### `models.GoogleAnalyticsDataAPI` - -```python -value: models.GoogleAnalyticsDataAPI = /* values here */ -``` - -### `models.GoogleDrive` - -```python -value: models.GoogleDrive = /* values here */ -``` - -### `models.GoogleSearchConsole` - -```python -value: models.GoogleSearchConsole = /* values here */ -``` - -### `models.GoogleSheets` - -```python -value: models.GoogleSheets = /* values here */ -``` - -### `models.Hubspot` - -```python -value: models.Hubspot = /* values here */ -``` - -### `models.Instagram` - -```python -value: models.Instagram = /* values here */ -``` - -### `models.Intercom` - -```python -value: models.Intercom = /* values here */ -``` - -### `models.LeverHiring` - -```python -value: models.LeverHiring 
= /* values here */ -``` - -### `models.LinkedinAds` - -```python -value: models.LinkedinAds = /* values here */ -``` - -### `models.Mailchimp` - -```python -value: models.Mailchimp = /* values here */ -``` - -### `models.MicrosoftOnedrive` - -```python -value: models.MicrosoftOnedrive = /* values here */ -``` - -### `models.MicrosoftSharepoint` - -```python -value: models.MicrosoftSharepoint = /* values here */ -``` - -### `models.MicrosoftTeams` - -```python -value: models.MicrosoftTeams = /* values here */ -``` - -### `models.Monday` - -```python -value: models.Monday = /* values here */ -``` - -### `models.Notion` - -```python -value: models.Notion = /* values here */ -``` - -### `models.Pinterest` - -```python -value: models.Pinterest = /* values here */ -``` - -### `models.RdStationMarketing` - -```python -value: models.RdStationMarketing = /* values here */ -``` - -### `models.Salesforce` - -```python -value: models.Salesforce = /* values here */ -``` - -### `models.Shopify` - -```python -value: models.Shopify = /* values here */ -``` - -### `models.Slack` - -```python -value: models.Slack = /* values here */ -``` - -### `models.Smartsheets` - -```python -value: models.Smartsheets = /* values here */ -``` - -### `models.SnapchatMarketing` - -```python -value: models.SnapchatMarketing = /* values here */ -``` - -### `models.Snowflake` - -```python -value: models.Snowflake = /* values here */ -``` - -### `models.Surveymonkey` - -```python -value: models.Surveymonkey = /* values here */ -``` - -### `models.TiktokMarketing` - -```python -value: models.TiktokMarketing = /* values here */ -``` - -### `Any` - -```python -value: Any = /* values here */ -``` - -### `models.Typeform` - -```python -value: models.Typeform = /* values here */ -``` - -### `models.YoutubeAnalytics` - -```python -value: models.YoutubeAnalytics = /* values here */ -``` - -### `models.ZendeskChat` - -```python -value: models.ZendeskChat = /* values here */ -``` - -### `models.ZendeskSupport` - -```python -value: models.ZendeskSupport = /* values here */ -``` - -### `models.ZendeskTalk` - -```python -value: models.ZendeskTalk = /* values here */ -``` - diff --git a/docs/models/objectstorageconfiguration.md b/docs/models/objectstorageconfiguration.md new file mode 100644 index 00000000..1d89ee13 --- /dev/null +++ b/docs/models/objectstorageconfiguration.md @@ -0,0 +1,17 @@ +# ObjectStorageConfiguration + + +## Supported Types + +### `models.NoneT` + +```python +value: models.NoneT = /* values here */ +``` + +### `models.DestinationCustomerIoS3` + +```python +value: models.DestinationCustomerIoS3 = /* values here */ +``` + diff --git a/docs/models/onehundredms.md b/docs/models/onehundredms.md new file mode 100644 index 00000000..7f99dd39 --- /dev/null +++ b/docs/models/onehundredms.md @@ -0,0 +1,8 @@ +# OneHundredms + + +## Values + +| Name | Value | +| --------------- | --------------- | +| `ONE_HUNDREDMS` | 100ms | \ No newline at end of file diff --git a/docs/models/openai.md b/docs/models/openai.md index 4c9b0cae..75c0620d 100644 --- a/docs/models/openai.md +++ b/docs/models/openai.md @@ -5,7 +5,7 @@ Use the OpenAI API to embed text. 
This option is using the text-embedding-ada-00 ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------ | -| `openai_key` | *str* | :heavy_check_mark: | N/A | -| `mode` | [Optional[models.DestinationAstraSchemasEmbeddingEmbedding1Mode]](../models/destinationastraschemasembeddingembedding1mode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | +| `openai_key` | *str* | :heavy_check_mark: | N/A | +| `mode` | [Optional[models.DestinationAstraMode]](../models/destinationastramode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/openaicompatible.md b/docs/models/openaicompatible.md index 5245b4aa..f9eedae9 100644 --- a/docs/models/openaicompatible.md +++ b/docs/models/openaicompatible.md @@ -5,10 +5,10 @@ Use a service that's compatible with the OpenAI API to embed text. ## Fields -| Field | Type | Required | Description | Example | -| ---------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- | -| `base_url` | *str* | :heavy_check_mark: | The base URL for your OpenAI-compatible service | https://your-service-name.com | -| `dimensions` | *int* | :heavy_check_mark: | The number of dimensions the embedding model is generating | 1536 | -| `api_key` | *Optional[str]* | :heavy_minus_sign: | N/A | | -| `mode` | [Optional[models.DestinationAstraSchemasEmbeddingEmbeddingMode]](../models/destinationastraschemasembeddingembeddingmode.md) | :heavy_minus_sign: | N/A | | -| `model_name` | *Optional[str]* | :heavy_minus_sign: | The name of the model to use for embedding | text-embedding-ada-002 | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------ | 
------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------ | +| `base_url` | *str* | :heavy_check_mark: | The base URL for your OpenAI-compatible service | https://your-service-name.com | +| `dimensions` | *int* | :heavy_check_mark: | The number of dimensions the embedding model is generating | 1536 | +| `api_key` | *Optional[str]* | :heavy_minus_sign: | N/A | | +| `mode` | [Optional[models.DestinationAstraSchemasEmbeddingEmbedding5Mode]](../models/destinationastraschemasembeddingembedding5mode.md) | :heavy_minus_sign: | N/A | | +| `model_name` | *Optional[str]* | :heavy_minus_sign: | The name of the model to use for embedding | text-embedding-ada-002 | \ No newline at end of file diff --git a/docs/models/openexchangerates.md b/docs/models/openexchangerates.md new file mode 100644 index 00000000..ef4c4b7a --- /dev/null +++ b/docs/models/openexchangerates.md @@ -0,0 +1,8 @@ +# OpenExchangeRates + + +## Values + +| Name | Value | +| --------------------- | --------------------- | +| `OPEN_EXCHANGE_RATES` | open-exchange-rates | \ No newline at end of file diff --git a/docs/models/opuswatch.md b/docs/models/opuswatch.md new file mode 100644 index 00000000..ca5d20a2 --- /dev/null +++ b/docs/models/opuswatch.md @@ -0,0 +1,8 @@ +# Opuswatch + + +## Values + +| Name | Value | +| ----------- | ----------- | +| `OPUSWATCH` | opuswatch | \ No newline at end of file diff --git a/docs/models/oracleenterprise.md b/docs/models/oracleenterprise.md new file mode 100644 index 00000000..b57ccfdd --- /dev/null +++ b/docs/models/oracleenterprise.md @@ -0,0 +1,8 @@ +# OracleEnterprise + + +## Values + +| Name | Value | +| ------------------- | ------------------- | +| `ORACLE_ENTERPRISE` | oracle-enterprise | \ No newline at end of file diff --git a/docs/models/organizationoauthcredentialsrequest.md b/docs/models/organizationoauthcredentialsrequest.md new file mode 100644 index 00000000..21398785 --- /dev/null +++ b/docs/models/organizationoauthcredentialsrequest.md @@ -0,0 +1,12 @@ +# OrganizationOAuthCredentialsRequest + +POST body for creating/updating organization level OAuth credentials + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | +| `actor_type` | [models.ActorTypeEnum](../models/actortypeenum.md) | :heavy_check_mark: | Whether you're setting this override for a source or destination | +| `configuration` | *Any* | :heavy_check_mark: | The values required to configure the source. | +| `name` | *str* | :heavy_check_mark: | The name of the source i.e. google-ads | \ No newline at end of file diff --git a/docs/models/outputformat.md b/docs/models/outputformat.md index d8aae9b4..5dd144b6 100644 --- a/docs/models/outputformat.md +++ b/docs/models/outputformat.md @@ -1,6 +1,6 @@ # OutputFormat -Output data format +Format of the data output. ## Supported Types diff --git a/docs/models/outputsize.md b/docs/models/outputsize.md new file mode 100644 index 00000000..6deac89a --- /dev/null +++ b/docs/models/outputsize.md @@ -0,0 +1,12 @@ +# OutputSize + +Whether to return full or compact data (the last 100 data points). 
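+
+For intraday requests this enum travels together with the reworked `Interval` enum earlier in this diff; a hedged sketch (the `SourceAlphavantage` class name and its `api_key`/`symbol` fields are assumptions, only the enum members are confirmed here):
+
+```python
+from airbyte_api import models
+
+# Pull compact (last 100 points) five-minute candles.
+# The source model name is assumed; the enums come from this diff.
+config = models.SourceAlphavantage(
+    api_key="demo",
+    symbol="AAPL",
+    interval=models.Interval.FIVEMIN,
+    output_size=models.OutputSize.COMPACT,
+)
+```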
+ + + +## Values + +| Name | Value | +| --------- | --------- | +| `COMPACT` | compact | +| `FULL` | full | \ No newline at end of file diff --git a/docs/models/padding.md b/docs/models/padding.md new file mode 100644 index 00000000..d5245f7c --- /dev/null +++ b/docs/models/padding.md @@ -0,0 +1,9 @@ +# Padding + + +## Values + +| Name | Value | +| --------------- | --------------- | +| `NO_PADDING` | NoPadding | +| `PKCS5_PADDING` | PKCS5Padding | \ No newline at end of file diff --git a/docs/models/paddle.md b/docs/models/paddle.md new file mode 100644 index 00000000..3456cc4c --- /dev/null +++ b/docs/models/paddle.md @@ -0,0 +1,8 @@ +# Paddle + + +## Values + +| Name | Value | +| -------- | -------- | +| `PADDLE` | paddle | \ No newline at end of file diff --git a/docs/models/pagerduty.md b/docs/models/pagerduty.md new file mode 100644 index 00000000..fab0092e --- /dev/null +++ b/docs/models/pagerduty.md @@ -0,0 +1,8 @@ +# Pagerduty + + +## Values + +| Name | Value | +| ----------- | ----------- | +| `PAGERDUTY` | pagerduty | \ No newline at end of file diff --git a/docs/models/partnerize.md b/docs/models/partnerize.md new file mode 100644 index 00000000..1d9412f0 --- /dev/null +++ b/docs/models/partnerize.md @@ -0,0 +1,8 @@ +# Partnerize + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `PARTNERIZE` | partnerize | \ No newline at end of file diff --git a/docs/models/partnerstack.md b/docs/models/partnerstack.md new file mode 100644 index 00000000..961d9069 --- /dev/null +++ b/docs/models/partnerstack.md @@ -0,0 +1,8 @@ +# Partnerstack + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `PARTNERSTACK` | partnerstack | \ No newline at end of file diff --git a/docs/models/passwordauthentication.md b/docs/models/passwordauthentication.md index 88fc0bf6..fdb7165a 100644 --- a/docs/models/passwordauthentication.md +++ b/docs/models/passwordauthentication.md @@ -1,12 +1,15 @@ # PasswordAuthentication +Connect through a jump server tunnel host using username and password authentication + ## Fields -| Field | Type | Required | Description | Example | -| -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -| `tunnel_host` | *str* | :heavy_check_mark: | Hostname of the jump server host that allows inbound ssh tunnel. | | -| `tunnel_user` | *str* | :heavy_check_mark: | OS-level username for logging into the jump server host | | -| `tunnel_user_password` | *str* | :heavy_check_mark: | OS-level password for logging into the jump server host | | -| `tunnel_method` | [models.DestinationClickhouseSchemasTunnelMethod](../models/destinationclickhouseschemastunnelmethod.md) | :heavy_check_mark: | Connect through a jump server tunnel host using username and password authentication | | -| `tunnel_port` | *Optional[int]* | :heavy_minus_sign: | Port on the proxy/jump server that accepts inbound ssh connections. 
| 22 | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | +| `tunnel_host` | *str* | :heavy_check_mark: | Hostname of the jump server host that allows inbound ssh tunnel. | +| `tunnel_user` | *str* | :heavy_check_mark: | OS-level username for logging into the jump server host | +| `tunnel_user_password` | *str* | :heavy_check_mark: | OS-level password for logging into the jump server host | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `tunnel_method` | [Optional[models.DestinationClickhouseSchemasTunnelMethod]](../models/destinationclickhouseschemastunnelmethod.md) | :heavy_minus_sign: | N/A | +| `tunnel_port` | *Optional[int]* | :heavy_minus_sign: | Port on the proxy/jump server that accepts inbound ssh connections. | \ No newline at end of file diff --git a/docs/models/payfit.md b/docs/models/payfit.md new file mode 100644 index 00000000..b47480b0 --- /dev/null +++ b/docs/models/payfit.md @@ -0,0 +1,8 @@ +# Payfit + + +## Values + +| Name | Value | +| -------- | -------- | +| `PAYFIT` | payfit | \ No newline at end of file diff --git a/docs/models/perigon.md b/docs/models/perigon.md new file mode 100644 index 00000000..276bfb3e --- /dev/null +++ b/docs/models/perigon.md @@ -0,0 +1,8 @@ +# Perigon + + +## Values + +| Name | Value | +| --------- | --------- | +| `PERIGON` | perigon | \ No newline at end of file diff --git a/docs/models/phyllo.md b/docs/models/phyllo.md new file mode 100644 index 00000000..88429907 --- /dev/null +++ b/docs/models/phyllo.md @@ -0,0 +1,8 @@ +# Phyllo + + +## Values + +| Name | Value | +| -------- | -------- | +| `PHYLLO` | phyllo | \ No newline at end of file diff --git a/docs/models/pingdom.md b/docs/models/pingdom.md new file mode 100644 index 00000000..e35ed927 --- /dev/null +++ b/docs/models/pingdom.md @@ -0,0 +1,8 @@ +# Pingdom + + +## Values + +| Name | Value | +| --------- | --------- | +| `PINGDOM` | pingdom | \ No newline at end of file diff --git a/docs/models/pokemonname.md b/docs/models/pokemonname.md index 923c5aa9..b787ab0f 100644 --- a/docs/models/pokemonname.md +++ b/docs/models/pokemonname.md @@ -35,10 +35,10 @@ Pokemon requested from the API. | `RAICHU` | raichu | | `SANDSHREW` | sandshrew | | `SANDSLASH` | sandslash | -| `NIDORANF` | nidoranf | +| `NIDORAN_F` | nidoran-f | | `NIDORINA` | nidorina | | `NIDOQUEEN` | nidoqueen | -| `NIDORANM` | nidoranm | +| `NIDORAN_M` | nidoran-m | | `NIDORINO` | nidorino | | `NIDOKING` | nidoking | | `CLEFAIRY` | clefairy | diff --git a/docs/models/poplar.md b/docs/models/poplar.md new file mode 100644 index 00000000..a67d7629 --- /dev/null +++ b/docs/models/poplar.md @@ -0,0 +1,8 @@ +# Poplar + + +## Values + +| Name | Value | +| -------- | -------- | +| `POPLAR` | poplar | \ No newline at end of file diff --git a/docs/models/prefer.md b/docs/models/prefer.md index 50a46c22..6d58aa66 100644 --- a/docs/models/prefer.md +++ b/docs/models/prefer.md @@ -5,6 +5,6 @@ Prefer SSL mode. 
## Fields -| Field | Type | Required | Description | -| ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | -| `mode` | [Optional[models.DestinationPostgresSchemasMode]](../models/destinationpostgresschemasmode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | +| `mode` | [Optional[models.DestinationPostgresSchemasSslModeMode]](../models/destinationpostgresschemassslmodemode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/preferred.md b/docs/models/preferred.md index 3436785e..99ef95a3 100644 --- a/docs/models/preferred.md +++ b/docs/models/preferred.md @@ -1,10 +1,11 @@ # Preferred -Automatically attempt SSL connection. If the MySQL server does not support SSL, continue with a regular connection. +To allow unencrypted communication only when the source doesn't support encryption. ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------ | ------------------------------------------------------ | ------------------------------------------------------ | ------------------------------------------------------ | -| `mode` | [models.SourceMysqlMode](../models/sourcemysqlmode.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `mode` | [Optional[models.SourceMysqlMode]](../models/sourcemysqlmode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/printify.md b/docs/models/printify.md new file mode 100644 index 00000000..dcfddd35 --- /dev/null +++ b/docs/models/printify.md @@ -0,0 +1,8 @@ +# Printify + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `PRINTIFY` | printify | \ No newline at end of file diff --git a/docs/models/protocol.md b/docs/models/protocol.md new file mode 100644 index 00000000..84d79360 --- /dev/null +++ b/docs/models/protocol.md @@ -0,0 +1,11 @@ +# Protocol + +Protocol for the database connection string. 
+ + +## Values + +| Name | Value | +| ------- | ------- | +| `HTTP` | http | +| `HTTPS` | https | \ No newline at end of file diff --git a/docs/models/raas.md new file mode 100644 index 00000000..43d15a15 --- /dev/null +++ b/docs/models/raas.md @@ -0,0 +1,8 @@ +# Raas + + +## Values + +| Name | Value | +| ------ | ------ | +| `RAAS` | RAAS | \ No newline at end of file diff --git a/docs/models/refreshtokenendpoint.md new file mode 100644 index 00000000..2bf73a2c --- /dev/null +++ b/docs/models/refreshtokenendpoint.md @@ -0,0 +1,9 @@ +# RefreshTokenEndpoint + + +## Values + +| Name | Value | +| ----------------------------------------------------- | ----------------------------------------------------- | +| `HTTPS_API_SANDBOX_EBAY_COM_IDENTITY_V1_OAUTH2_TOKEN` | https://api.sandbox.ebay.com/identity/v1/oauth2/token | +| `HTTPS_API_EBAY_COM_IDENTITY_V1_OAUTH2_TOKEN` | https://api.ebay.com/identity/v1/oauth2/token | \ No newline at end of file diff --git a/docs/models/replicatepermissionsacl.md new file mode 100644 index 00000000..d0cc2c1b --- /dev/null +++ b/docs/models/replicatepermissionsacl.md @@ -0,0 +1,12 @@ +# ReplicatePermissionsACL + +Sends one identity stream and one or more permissions (ACL) streams to the destination. This data can be used in downstream systems to recreate permission restrictions mirroring the original source. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------- | +| `delivery_type` | [Optional[models.SourceGoogleDriveSchemasDeliveryType]](../models/sourcegoogledriveschemasdeliverytype.md) | :heavy_minus_sign: | N/A | +| `domain` | *Optional[str]* | :heavy_minus_sign: | The Google domain of the identities. 
| +| `include_identities_stream` | *Optional[bool]* | :heavy_minus_sign: | This data can be used in downstream systems to recreate permission restrictions mirroring the original source | \ No newline at end of file
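
A minimal sketch of how the new `ReplicatePermissionsACL` model can be filled in from this SDK; the domain and flag values below are illustrative placeholders, not values taken from this change:

```python
from airbyte_api import models

# Sketch: choose the permissions (ACL) delivery mode for a Google Drive
# source. "example.com" stands in for the Google domain of the identities.
acl_delivery = models.ReplicatePermissionsACL(
    domain="example.com",
    include_identities_stream=True,  # also emit the identities stream
)
```
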
diff --git a/docs/models/reportbasedstreams.md new file mode 100644 index 00000000..8900adf5 --- /dev/null +++ b/docs/models/reportbasedstreams.md @@ -0,0 +1,11 @@ +# ReportBasedStreams + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `password` | *str* | :heavy_check_mark: | N/A | | +| `report_ids` | List[*Any*] | :heavy_check_mark: | Report IDs can be found by clicking the three dots on the right side of the report > Web Service > View URLs > in JSON url copy everything between Workday tenant/ and ?format=json. | for JSON url https://hostname/ccx/service/customreport2/tenant/report/id?format=json Report ID is report/id. | +| `username` | *str* | :heavy_check_mark: | N/A | | +| `auth_type` | [models.Raas](../models/raas.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/reportname.md index a7162f46..ef7fd879 100644 --- a/docs/models/reportname.md +++ b/docs/models/reportname.md @@ -41,12 +41,4 @@ | `GET_V2_SETTLEMENT_REPORT_DATA_FLAT_FILE` | GET_V2_SETTLEMENT_REPORT_DATA_FLAT_FILE | | `GET_XML_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL` | GET_XML_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL | | `GET_XML_BROWSE_TREE_DATA` | GET_XML_BROWSE_TREE_DATA | -| `GET_VENDOR_REAL_TIME_INVENTORY_REPORT` | GET_VENDOR_REAL_TIME_INVENTORY_REPORT | -| `GET_BRAND_ANALYTICS_MARKET_BASKET_REPORT` | GET_BRAND_ANALYTICS_MARKET_BASKET_REPORT | -| `GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT` | GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT | -| `GET_BRAND_ANALYTICS_REPEAT_PURCHASE_REPORT` | GET_BRAND_ANALYTICS_REPEAT_PURCHASE_REPORT | -| `GET_SALES_AND_TRAFFIC_REPORT` | GET_SALES_AND_TRAFFIC_REPORT | -| `GET_VENDOR_SALES_REPORT` | GET_VENDOR_SALES_REPORT | -| `GET_VENDOR_INVENTORY_REPORT` | GET_VENDOR_INVENTORY_REPORT | -| `GET_VENDOR_NET_PURE_PRODUCT_MARGIN_REPORT` | GET_VENDOR_NET_PURE_PRODUCT_MARGIN_REPORT | -| `GET_VENDOR_TRAFFIC_REPORT` | GET_VENDOR_TRAFFIC_REPORT | \ No newline at end of file +| `GET_VENDOR_REAL_TIME_INVENTORY_REPORT` | GET_VENDOR_REAL_TIME_INVENTORY_REPORT | \ No newline at end of file diff --git a/docs/models/reportrecordtypeenum.md deleted file mode 100644 index 09c75d19..00000000 --- a/docs/models/reportrecordtypeenum.md +++ /dev/null @@ -1,17 +0,0 @@ -# ReportRecordTypeEnum - -An enumeration. 
- - -## Values - -| Name | Value | -| ---------------- | ---------------- | -| `AD_GROUPS` | adGroups | -| `ASINS` | asins | -| `ASINS_KEYWORDS` | asins_keywords | -| `ASINS_TARGETS` | asins_targets | -| `CAMPAIGNS` | campaigns | -| `KEYWORDS` | keywords | -| `PRODUCT_ADS` | productAds | -| `TARGETS` | targets | \ No newline at end of file diff --git a/docs/models/require.md b/docs/models/require.md index d71cb4cc..6678dbe5 100644 --- a/docs/models/require.md +++ b/docs/models/require.md @@ -5,6 +5,6 @@ Require SSL mode. ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | -| `mode` | [Optional[models.DestinationPostgresSchemasSslModeMode]](../models/destinationpostgresschemassslmodemode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- | +| `mode` | [Optional[models.DestinationPostgresSchemasSSLModeSSLModesMode]](../models/destinationpostgresschemassslmodesslmodesmode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/required.md b/docs/models/required.md index 1f891bbe..23e867cc 100644 --- a/docs/models/required.md +++ b/docs/models/required.md @@ -1,10 +1,11 @@ # Required -Always connect with SSL. If the MySQL server doesn’t support SSL, the connection will not be established. Certificate Authority (CA) and Hostname are not verified. +To always require encryption. Note: The connection will fail if the source doesn't support encryption. 
## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -| `mode` | [models.SourceMysqlSchemasMode](../models/sourcemysqlschemasmode.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `mode` | [Optional[models.SourceMysqlSchemasMode]](../models/sourcemysqlschemasmode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/resolution.md b/docs/models/resolution.md new file mode 100644 index 00000000..e7460ed6 --- /dev/null +++ b/docs/models/resolution.md @@ -0,0 +1,10 @@ +# Resolution + + +## Values + +| Name | Value | +| ------ | ------ | +| `HOUR` | hour | +| `DAY` | day | +| `WEEK` | week | \ No newline at end of file diff --git a/docs/models/resourcerequirements.md b/docs/models/resourcerequirements.md new file mode 100644 index 00000000..e44182b9 --- /dev/null +++ b/docs/models/resourcerequirements.md @@ -0,0 +1,15 @@ +# ResourceRequirements + +optional resource requirements to run workers (blank for unbounded allocations) + + +## Fields + +| Field | Type | Required | Description | +| --------------------------- | --------------------------- | --------------------------- | --------------------------- | +| `cpu_limit` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `cpu_request` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `ephemeral_storage_limit` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `ephemeral_storage_request` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `memory_limit` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `memory_request` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/rest.md b/docs/models/rest.md new file mode 100644 index 00000000..dfc37a3b --- /dev/null +++ b/docs/models/rest.md @@ -0,0 +1,8 @@ +# Rest + + +## Values + +| Name | Value | +| ------ | ------ | +| `REST` | REST | \ No newline at end of file diff --git a/docs/models/restapistreams.md b/docs/models/restapistreams.md new file mode 100644 index 00000000..3a59fae9 --- /dev/null +++ b/docs/models/restapistreams.md @@ -0,0 +1,10 @@ +# RESTAPIStreams + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | 
------------------------------------------------------------------------------------------------------------------------------------------------- | +| `access_token` | *str* | :heavy_check_mark: | Follow the instructions in the "OAuth 2.0 in Postman - API Client for Integrations" article in the Workday community docs to obtain access token. | | +| `auth_type` | [models.Rest](../models/rest.md) | :heavy_check_mark: | N/A | | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | Rows after this date will be synced, default 2 years ago. | 2024-10-26T07:00:00.000Z | \ No newline at end of file diff --git a/docs/models/restcatalog.md b/docs/models/restcatalog.md index c0d5a9ce..944cfaba 100644 --- a/docs/models/restcatalog.md +++ b/docs/models/restcatalog.md @@ -1,13 +1,13 @@ -# RESTCatalog +# RestCatalog -The RESTCatalog connects to a REST server at the specified URI +Configuration details for connecting to a REST catalog. ## Fields -| Field | Type | Required | Description | Example | -| ----------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `rest_uri` | *str* | :heavy_check_mark: | N/A | http://localhost:12345 | -| `catalog_type` | [Optional[models.DestinationIcebergSchemasCatalogConfigCatalogType]](../models/destinationicebergschemascatalogconfigcatalogtype.md) | :heavy_minus_sign: | N/A | | -| `rest_credential` | *Optional[str]* | :heavy_minus_sign: | N/A | username:password | -| `rest_token` | *Optional[str]* | :heavy_minus_sign: | N/A | eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `namespace` | *str* | :heavy_check_mark: | The namespace to be used in the Table identifier.
This will ONLY be used if the `Destination Namespace` setting for the connection is set to
`Destination-defined` or `Source-defined` | +| `server_uri` | *str* | :heavy_check_mark: | The base URL of the Rest server used to connect to the Rest catalog. | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `catalog_type` | [Optional[models.DestinationS3DataLakeSchemasCatalogType]](../models/destinations3datalakeschemascatalogtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/retailexpressbymaropost.md b/docs/models/retailexpressbymaropost.md new file mode 100644 index 00000000..aac65ee9 --- /dev/null +++ b/docs/models/retailexpressbymaropost.md @@ -0,0 +1,8 @@ +# RetailexpressByMaropost + + +## Values + +| Name | Value | +| --------------------------- | --------------------------- | +| `RETAILEXPRESS_BY_MAROPOST` | retailexpress-by-maropost | \ No newline at end of file diff --git a/docs/models/ringcentral.md b/docs/models/ringcentral.md new file mode 100644 index 00000000..931841ae --- /dev/null +++ b/docs/models/ringcentral.md @@ -0,0 +1,8 @@ +# Ringcentral + + +## Values + +| Name | Value | +| ------------- | ------------- | +| `RINGCENTRAL` | ringcentral | \ No newline at end of file diff --git a/docs/models/rocketchat.md b/docs/models/rocketchat.md new file mode 100644 index 00000000..e7525670 --- /dev/null +++ b/docs/models/rocketchat.md @@ -0,0 +1,8 @@ +# RocketChat + + +## Values + +| Name | Value | +| ------------- | ------------- | +| `ROCKET_CHAT` | rocket-chat | \ No newline at end of file diff --git a/docs/models/rowfiltering.md b/docs/models/rowfiltering.md new file mode 100644 index 00000000..333597dd --- /dev/null +++ b/docs/models/rowfiltering.md @@ -0,0 +1,8 @@ +# RowFiltering + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------ | ------------------------------------------------------------------ | ------------------------------------------------------------------ | ------------------------------------------------------------------ | +| `conditions` | [models.RowFilteringOperation](../models/rowfilteringoperation.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/rowfilteringoperation.md b/docs/models/rowfilteringoperation.md new file mode 100644 index 00000000..7282c472 --- /dev/null +++ b/docs/models/rowfilteringoperation.md @@ -0,0 +1,17 @@ +# RowFilteringOperation + + +## Supported Types + +### `models.Equal` + +```python +value: models.Equal = /* values here */ +``` + +### `models.Not` + +```python +value: models.Not = /* values here */ +``` + diff --git a/docs/models/rowfilteringoperationtype.md b/docs/models/rowfilteringoperationtype.md new file mode 100644 index 00000000..749ae389 --- /dev/null +++ b/docs/models/rowfilteringoperationtype.md @@ -0,0 +1,9 @@ +# RowFilteringOperationType + + +## Values + +| Name | Value | +| ------- | ------- | +| `EQUAL` | EQUAL | +| `NOT` | NOT | \ No newline at end of file diff --git a/docs/models/s3datalake.md b/docs/models/s3datalake.md new file mode 100644 index 00000000..8ca7cf82 --- /dev/null +++ b/docs/models/s3datalake.md @@ -0,0 +1,8 @@ +# S3DataLake + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `S3_DATA_LAKE` | s3-data-lake | \ No newline at end of file diff --git a/docs/models/saphanaenterprise.md b/docs/models/saphanaenterprise.md new file mode 100644 index 00000000..85b81c5b --- /dev/null +++ b/docs/models/saphanaenterprise.md @@ -0,0 +1,8 @@ +# SapHanaEnterprise + + +## Values + +| Name | Value | +| 
--------------------- | --------------------- | +| `SAP_HANA_ENTERPRISE` | sap-hana-enterprise | \ No newline at end of file diff --git a/docs/models/scanchangeswithuserdefinedcursor.md b/docs/models/scanchangeswithuserdefinedcursor.md index b74b0731..52e07e43 100644 --- a/docs/models/scanchangeswithuserdefinedcursor.md +++ b/docs/models/scanchangeswithuserdefinedcursor.md @@ -5,6 +5,7 @@ Incrementally detects new inserts and updates using the ads_management, ads_read, read_insights, business_management. Then click on "Get token". See the docs for more information. | -| `auth_type` | [Optional[models.SourceFacebookMarketingSchemasAuthType]](../models/sourcefacebookmarketingschemasauthtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `service_account_info` | *str* | :heavy_check_mark: | Enter your service account key in JSON format. See the docs for more information on how to generate this key. 
| +| `auth_type` | [Optional[models.DestinationGoogleSheetsSchemasAuthType]](../models/destinationgooglesheetsschemasauthtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/servicedetails.md b/docs/models/servicedetails.md new file mode 100644 index 00000000..a8e9ce96 --- /dev/null +++ b/docs/models/servicedetails.md @@ -0,0 +1,11 @@ +# ServiceDetails + + +## Values + +| Name | Value | +| ------------------------------------- | ------------------------------------- | +| `ESCALATION_POLICIES` | escalation_policies | +| `TEAMS` | teams | +| `INTEGRATIONS` | integrations | +| `AUTO_PAUSE_NOTIFICATIONS_PARAMETERS` | auto_pause_notifications_parameters | \ No newline at end of file diff --git a/docs/models/servicenow.md b/docs/models/servicenow.md new file mode 100644 index 00000000..5be592c4 --- /dev/null +++ b/docs/models/servicenow.md @@ -0,0 +1,8 @@ +# ServiceNow + + +## Values + +| Name | Value | +| ------------- | ------------- | +| `SERVICE_NOW` | service-now | \ No newline at end of file diff --git a/docs/models/sharepointenterprise.md b/docs/models/sharepointenterprise.md new file mode 100644 index 00000000..13728019 --- /dev/null +++ b/docs/models/sharepointenterprise.md @@ -0,0 +1,8 @@ +# SharepointEnterprise + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | +| `credentials` | [Optional[models.SharepointEnterpriseCredentials]](../models/sharepointenterprisecredentials.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sharepointenterprisecredentials.md b/docs/models/sharepointenterprisecredentials.md new file mode 100644 index 00000000..a955b810 --- /dev/null +++ b/docs/models/sharepointenterprisecredentials.md @@ -0,0 +1,9 @@ +# SharepointEnterpriseCredentials + + +## Fields + +| Field | Type | Required | Description | +| ----------------------------------------------------- | ----------------------------------------------------- | ----------------------------------------------------- | ----------------------------------------------------- | +| `client_id` | *Optional[str]* | :heavy_minus_sign: | Client ID of your Microsoft developer application | +| `client_secret` | *Optional[str]* | :heavy_minus_sign: | Client Secret of your Microsoft developer application | \ No newline at end of file diff --git a/docs/models/shipstation.md b/docs/models/shipstation.md new file mode 100644 index 00000000..78470ba4 --- /dev/null +++ b/docs/models/shipstation.md @@ -0,0 +1,8 @@ +# Shipstation + + +## Values + +| Name | Value | +| ------------- | ------------- | +| `SHIPSTATION` | shipstation | \ No newline at end of file diff --git a/docs/models/shopwired.md b/docs/models/shopwired.md new file mode 100644 index 00000000..dfd3bf4e --- /dev/null +++ b/docs/models/shopwired.md @@ -0,0 +1,8 @@ +# Shopwired + + +## Values + +| Name | Value | +| ----------- | ----------- | +| `SHOPWIRED` | shopwired | \ No newline at end of file diff --git a/docs/models/shutterstock.md b/docs/models/shutterstock.md new file mode 100644 index 00000000..59a92028 --- /dev/null +++ b/docs/models/shutterstock.md @@ -0,0 +1,8 @@ +# Shutterstock + + +## 
Values + +| Name | Value | +| -------------- | -------------- | +| `SHUTTERSTOCK` | shutterstock | \ No newline at end of file diff --git a/docs/models/signnow.md b/docs/models/signnow.md new file mode 100644 index 00000000..b6885cd0 --- /dev/null +++ b/docs/models/signnow.md @@ -0,0 +1,8 @@ +# Signnow + + +## Values + +| Name | Value | +| --------- | --------- | +| `SIGNNOW` | signnow | \ No newline at end of file diff --git a/docs/models/snowflake.md b/docs/models/snowflake.md index 989bcfdb..de2cf79e 100644 --- a/docs/models/snowflake.md +++ b/docs/models/snowflake.md @@ -1,8 +1,8 @@ # Snowflake -## Fields +## Values -| Field | Type | Required | Description | -| -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -| `credentials` | [Optional[models.SnowflakeCredentials]](../models/snowflakecredentials.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Name | Value | +| ----------- | ----------- | +| `SNOWFLAKE` | snowflake | \ No newline at end of file diff --git a/docs/models/snowflakecredentials.md b/docs/models/snowflakecredentials.md deleted file mode 100644 index 2753afa5..00000000 --- a/docs/models/snowflakecredentials.md +++ /dev/null @@ -1,9 +0,0 @@ -# SnowflakeCredentials - - -## Fields - -| Field | Type | Required | Description | -| ---------------------------------------------------------- | ---------------------------------------------------------- | ---------------------------------------------------------- | ---------------------------------------------------------- | -| `client_id` | *Optional[str]* | :heavy_minus_sign: | The Client ID of your Snowflake developer application. | -| `client_secret` | *Optional[str]* | :heavy_minus_sign: | The Client Secret of your Snowflake developer application. 
| \ No newline at end of file diff --git a/docs/models/source100ms.md b/docs/models/source100ms.md new file mode 100644 index 00000000..eab6b385 --- /dev/null +++ b/docs/models/source100ms.md @@ -0,0 +1,10 @@ +# Source100ms + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `management_token` | *str* | :heavy_check_mark: | The management token used for authenticating API requests. You can find or generate this token in your 100ms dashboard under the API section. Refer to the documentation at https://www.100ms.live/docs/concepts/v2/concepts/security-and-tokens#management-token-for-rest-api for more details. 
| +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.OneHundredms](../models/onehundredms.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceairbyte.md index 877cd46d..ea7994f9 100644 --- a/docs/models/sourceairbyte.md +++ b/docs/models/sourceairbyte.md @@ -3,9 +3,10 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -| `client_id` | *str* | :heavy_check_mark: | N/A | -| `client_secret` | *str* | :heavy_check_mark: | N/A | -| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | -| `source_type` | [models.Airbyte](../models/airbyte.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | +| `client_id` | *str* | :heavy_check_mark: | N/A | +| `client_secret` | *str* | :heavy_check_mark: | N/A | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `host` | *Optional[str]* | :heavy_minus_sign: | The Host URL of your Self-Managed Deployment (e.g. airbyte.mydomain.com) | +| `source_type` | [models.Airbyte](../models/airbyte.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceairtableoauth20.md index 192c56b3..7d7324ce 100644 --- a/docs/models/sourceairtableoauth20.md +++ b/docs/models/sourceairtableoauth20.md @@ -6,7 +6,7 @@ | Field | Type | Required | Description | | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | | `client_id` | *str* | :heavy_check_mark: | The client ID of the Airtable developer application. | -| `client_secret` | *str* | :heavy_check_mark: | The client secret the Airtable developer application. | +| `client_secret` | *str* | :heavy_check_mark: | The client secret of the Airtable developer application. | | `refresh_token` | *str* | :heavy_check_mark: | The key to refresh the expired access token. | | `access_token` | *Optional[str]* | :heavy_minus_sign: | Access Token for making authenticated requests. 
| | `auth_method` | [Optional[models.SourceAirtableSchemasAuthMethod]](../models/sourceairtableschemasauthmethod.md) | :heavy_minus_sign: | N/A |
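
A minimal sketch of the new optional `host` field in use when creating a source, assuming bearer-token security and placeholder IDs (see the SDK README for the full set of supported schemes):

```python
from datetime import date

import airbyte_api
from airbyte_api import models

# Placeholder token and workspace ID, shown for illustration only.
s = airbyte_api.AirbyteAPI(
    security=models.Security(bearer_auth="<api-token>"),
)

res = s.sources.create_source(request=models.SourceCreateRequest(
    name="self-managed-airbyte",
    workspace_id="00000000-0000-0000-0000-000000000000",
    configuration=models.SourceAirbyte(
        client_id="<client-id>",
        client_secret="<client-secret>",
        start_date=date(2024, 1, 1),
        host="airbyte.mydomain.com",  # the new optional field
    ),
))
```
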
diff --git a/docs/models/sourcealpacabrokerapi.md new file mode 100644 index 00000000..1d719a89 --- /dev/null +++ b/docs/models/sourcealpacabrokerapi.md @@ -0,0 +1,13 @@ +# SourceAlpacaBrokerAPI + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `username` | *str* | :heavy_check_mark: | API Key ID for the Alpaca market | +| `environment` | [Optional[models.SourceAlpacaBrokerAPIEnvironment]](../models/sourcealpacabrokerapienvironment.md) | :heavy_minus_sign: | The trading environment, either 'live', 'paper' or 'broker-api.sandbox'. | +| `limit` | *Optional[str]* | :heavy_minus_sign: | Limit for each response object | +| `password` | *Optional[str]* | :heavy_minus_sign: | Your Alpaca API Secret Key. You can find this in the Alpaca developer web console under your account settings. | +| `source_type` | [models.AlpacaBrokerAPI](../models/alpacabrokerapi.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcealpacabrokerapienvironment.md new file mode 100644 index 00000000..6258685c --- /dev/null +++ b/docs/models/sourcealpacabrokerapienvironment.md @@ -0,0 +1,12 @@ +# SourceAlpacaBrokerAPIEnvironment + +The trading environment, either 'live', 'paper' or 'broker-api.sandbox'. + + +## Values + +| Name | Value | +| -------------------- | -------------------- | +| `API` | api | +| `PAPER_API` | paper-api | +| `BROKER_API_SANDBOX` | broker-api.sandbox | \ No newline at end of file diff --git a/docs/models/sourcealphavantage.md new file mode 100644 index 00000000..7ba76236 --- /dev/null +++ b/docs/models/sourcealphavantage.md @@ -0,0 +1,13 @@ +# SourceAlphaVantage + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | +| `api_key` | *str* | :heavy_check_mark: | API Key | | +| `symbol` | *str* | :heavy_check_mark: | Stock symbol (with exchange code) | AAPL | +| `adjusted` | *Optional[bool]* | :heavy_minus_sign: | Whether to return adjusted data. Only applicable to intraday endpoints.
| | +| `interval` | [Optional[models.Interval]](../models/interval.md) | :heavy_minus_sign: | Time-series data point interval. Required for intraday endpoints.
| | +| `outputsize` | [Optional[models.OutputSize]](../models/outputsize.md) | :heavy_minus_sign: | Whether to return full or compact data (the last 100 data points).
| | +| `source_type` | [models.AlphaVantage](../models/alphavantage.md) | :heavy_check_mark: | N/A | | \ No newline at end of file
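
Read together with the `OutputSize` enum added earlier in this changeset, the new Alpha Vantage source config could be assembled as in this sketch; the API key and symbol are placeholders:

```python
from airbyte_api import models

# Sketch: compact output returns only the last 100 data points, which keeps
# test syncs small; use models.OutputSize.FULL for the complete history.
config = models.SourceAlphaVantage(
    api_key="<alpha-vantage-key>",
    symbol="AAPL",
    adjusted=False,  # unadjusted data; only applies to intraday endpoints
    outputsize=models.OutputSize.COMPACT,
)
```
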
diff --git a/docs/models/sourceamazonads.md index a08c6aad..8e0ba792 100644 --- a/docs/models/sourceamazonads.md +++ b/docs/models/sourceamazonads.md @@ -11,9 +11,8 @@ | `auth_type` | [Optional[models.SourceAmazonAdsAuthType]](../models/sourceamazonadsauthtype.md) | :heavy_minus_sign: | N/A | | | `look_back_window` | *Optional[int]* | :heavy_minus_sign: | The amount of days to go back in time to get the updated data from Amazon Ads | 3 | | `marketplace_ids` | List[*str*] | :heavy_minus_sign: | Marketplace IDs you want to fetch data for. Note: If Profile IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID. | | +| `num_workers` | *Optional[int]* | :heavy_minus_sign: | The number of worker threads to use for the sync. | 2 | | `profiles` | List[*int*] | :heavy_minus_sign: | Profile IDs you want to fetch data for. The Amazon Ads source connector supports only profiles with seller and vendor type, profiles with agency type will be ignored. See docs for more details. Note: If Marketplace IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID. | | | `region` | [Optional[models.Region]](../models/region.md) | :heavy_minus_sign: | Region to pull data from (EU/NA/FE). See docs for more details. | | -| `report_record_types` | List[[models.ReportRecordTypeEnum](../models/reportrecordtypeenum.md)] | :heavy_minus_sign: | Optional configuration which accepts an array of string of record types. Leave blank for default behaviour to pull all report types. Use this config option only if you want to pull specific report type(s). See docs for more details | | | `source_type` | [models.SourceAmazonAdsAmazonAds](../models/sourceamazonadsamazonads.md) | :heavy_check_mark: | N/A | | -| `start_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_minus_sign: | The Start date for collecting reports, should not be more than 60 days in the past. In YYYY-MM-DD format | 2022-10-10 | -| `state_filter` | List[[models.StateFilterEnum](../models/statefilterenum.md)] | :heavy_minus_sign: | Reflects the state of the Display, Product, and Brand Campaign streams as enabled, paused, or archived. If you do not populate this field, it will be ignored completely. | | \ No newline at end of file +| `start_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_minus_sign: | The Start date for collecting reports, should not be more than 60 days in the past. 
In YYYY-MM-DD format | 2022-10-10 | \ No newline at end of file diff --git a/docs/models/sourceamazonsellerpartner.md b/docs/models/sourceamazonsellerpartner.md index df845c7e..6736c816 100644 --- a/docs/models/sourceamazonsellerpartner.md +++ b/docs/models/sourceamazonsellerpartner.md @@ -3,18 +3,22 @@ ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `lwa_app_id` | *str* | :heavy_check_mark: | Your Login with Amazon Client ID. | | -| `lwa_client_secret` | *str* | :heavy_check_mark: | Your Login with Amazon Client Secret. | | -| `refresh_token` | *str* | :heavy_check_mark: | The Refresh Token obtained via OAuth flow authorization. | | -| `account_type` | [Optional[models.AWSSellerPartnerAccountType]](../models/awssellerpartneraccounttype.md) | :heavy_minus_sign: | Type of the Account you're going to authorize the Airbyte application by | | -| `auth_type` | [Optional[models.SourceAmazonSellerPartnerAuthType]](../models/sourceamazonsellerpartnerauthtype.md) | :heavy_minus_sign: | N/A | | -| `aws_environment` | [Optional[models.AWSEnvironment]](../models/awsenvironment.md) | :heavy_minus_sign: | Select the AWS Environment. | | -| `period_in_days` | *Optional[int]* | :heavy_minus_sign: | For syncs spanning a large date range, this option is used to request data in a smaller fixed window to improve sync reliability. This time window can be configured granularly by day. | | -| `region` | [Optional[models.AWSRegion]](../models/awsregion.md) | :heavy_minus_sign: | Select the AWS Region. | | -| `replication_end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated. 
| 2017-01-25T00:00:00Z | -| `replication_start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. If start date is not provided or older than 2 years ago from today, the date 2 years ago from today will be used. | 2017-01-25T00:00:00Z | -| `report_options_list` | List[[models.ReportOptions](../models/reportoptions.md)] | :heavy_minus_sign: | Additional information passed to reports. This varies by report type. | | -| `source_type` | [models.SourceAmazonSellerPartnerAmazonSellerPartner](../models/sourceamazonsellerpartneramazonsellerpartner.md) | :heavy_check_mark: | N/A | | -| `wait_to_avoid_fatal_errors` | *Optional[bool]* | :heavy_minus_sign: | For report based streams with known amount of requests per time period, this option will use waiting time between requests to avoid fatal statuses in reports. See Troubleshooting section for more details | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `lwa_app_id` | *str* | :heavy_check_mark: | Your Login with Amazon Client ID. | | +| `lwa_client_secret` | *str* | :heavy_check_mark: | Your Login with Amazon Client Secret. | | +| `refresh_token` | *str* | :heavy_check_mark: | The Refresh Token obtained via OAuth flow authorization. | | +| `account_type` | [Optional[models.AWSSellerPartnerAccountType]](../models/awssellerpartneraccounttype.md) | :heavy_minus_sign: | Type of the Account you're going to authorize the Airbyte application by | | +| `app_id` | *Optional[str]* | :heavy_minus_sign: | Your Amazon Application ID. | | +| `auth_type` | [Optional[models.SourceAmazonSellerPartnerAuthType]](../models/sourceamazonsellerpartnerauthtype.md) | :heavy_minus_sign: | N/A | | +| `aws_environment` | [Optional[models.AWSEnvironment]](../models/awsenvironment.md) | :heavy_minus_sign: | Select the AWS Environment. | | +| `financial_events_step` | [Optional[models.FinancialEventsStepSizeInDays]](../models/financialeventsstepsizeindays.md) | :heavy_minus_sign: | The time window size (in days) for fetching financial events data in chunks. Options are 1 day, 7 days, 14 days, 30 days, 60 days, and 190 days, based on API limitations.

- **Smaller step sizes (e.g., 1 day)** are better for large data volumes. They fetch smaller chunks per request, reducing the risk of timeouts or overwhelming the API, though more requests may slow syncing and increase the chance of hitting rate limits.
- **Larger step sizes (e.g., 14 days)** are better for smaller data volumes. They fetch more data per request, speeding up syncing and reducing the number of API calls, which minimizes strain on rate limits.

Select a step size that matches your data volume to optimize syncing speed and API performance. | | +| `max_async_job_count` | *Optional[int]* | :heavy_minus_sign: | The maximum number of concurrent asynchronous job requests that can be active at a time. | | +| `num_workers` | *Optional[int]* | :heavy_minus_sign: | The number of workers to use for the connector when syncing concurrently. | | +| `period_in_days` | *Optional[int]* | :heavy_minus_sign: | For syncs spanning a large date range, this option is used to request data in a smaller fixed window to improve sync reliability. This time window can be configured granularly by day. | | +| `region` | [Optional[models.AWSRegion]](../models/awsregion.md) | :heavy_minus_sign: | Select the AWS Region. | | +| `replication_end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated. | 2017-01-25T00:00:00Z | +| `replication_start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. If the start date is not provided, or is more than 2 years in the past, the date 2 years ago from today will be used. | 2017-01-25T00:00:00Z | +| `report_options_list` | List[[models.ReportOptions](../models/reportoptions.md)] | :heavy_minus_sign: | Additional information passed to reports. This varies by report type. | | +| `source_type` | [models.SourceAmazonSellerPartnerAmazonSellerPartner](../models/sourceamazonsellerpartneramazonsellerpartner.md) | :heavy_check_mark: | N/A | | +| `wait_to_avoid_fatal_errors` | *Optional[bool]* | :heavy_minus_sign: | For report-based streams with a known number of requests per time period, this option adds waiting time between requests to avoid fatal report statuses. See the Troubleshooting section for more details | |
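To make the step-size guidance above concrete, here is a minimal sketch of building this model with a 1-day financial-events window for a large account. It assumes the generated models are importable as `airbyte_api.models`; the enum member name is illustrative, so check the generated `FinancialEventsStepSizeInDays` class for the exact spelling.

```python
# Minimal sketch, not canonical usage: placeholder credentials throughout.
from airbyte_api import models

seller_partner = models.SourceAmazonSellerPartner(
    lwa_app_id="amzn1.application-oa2-client.xxxx",  # Login with Amazon Client ID
    lwa_client_secret="...",                         # Login with Amazon Client Secret
    refresh_token="Atzr|...",                        # token from the OAuth flow
    # Small window => smaller chunks per request, fewer timeouts on big accounts.
    financial_events_step=models.FinancialEventsStepSizeInDays.P1_D,  # member name assumed
    wait_to_avoid_fatal_errors=True,  # space out report requests (see above)
)
```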
\ No newline at end of file diff --git a/docs/models/sourceamazonsqs.md index fb920dd6..509e79ea 100644 --- a/docs/models/sourceamazonsqs.md +++ b/docs/models/sourceamazonsqs.md @@ -3,15 +3,15 @@ ## Fields -| Field | Type | Required | Description | Example | -| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `queue_url` | *str* | :heavy_check_mark: | URL of the SQS Queue | https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue | -| `region` | [models.SourceAmazonSqsAWSRegion](../models/sourceamazonsqsawsregion.md) | :heavy_check_mark: | AWS Region of the SQS Queue | | -| `access_key` | *Optional[str]* | :heavy_minus_sign: | The Access Key ID of the AWS IAM Role to use for pulling messages | xxxxxHRNxxx3TBxxxxxx | -| `attributes_to_return` | *Optional[str]* | :heavy_minus_sign: | Comma separated list of Mesage Attribute names to return | attr1,attr2 | -| `delete_messages` | *Optional[bool]* | :heavy_minus_sign: | If Enabled, messages will be deleted from the SQS Queue after being read. If Disabled, messages are left in the queue and can be read more than once. WARNING: Enabling this option can result in data loss in cases of failure, use with caution, see documentation for more detail. | | -| `max_batch_size` | *Optional[int]* | :heavy_minus_sign: | Max amount of messages to get in one batch (10 max) | 5 | -| `max_wait_time` | *Optional[int]* | :heavy_minus_sign: | Max amount of time in seconds to wait for messages in a single poll (20 max) | 5 | -| `secret_key` | *Optional[str]* | :heavy_minus_sign: | The Secret Key of the AWS IAM Role to use for pulling messages | hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz | -| `source_type` | [models.AmazonSqs](../models/amazonsqs.md) | :heavy_check_mark: | N/A | | -| `visibility_timeout` | *Optional[int]* | :heavy_minus_sign: | Modify the Visibility Timeout of the individual message from the Queue's default (seconds). 
| 15 | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `access_key` | *str* | :heavy_check_mark: | The Access Key ID of the AWS IAM Role to use for pulling messages | xxxxxHRNxxx3TBxxxxxx | +| `queue_url` | *str* | :heavy_check_mark: | URL of the SQS Queue | https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue | +| `secret_key` | *str* | :heavy_check_mark: | The Secret Key of the AWS IAM Role to use for pulling messages | hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz | +| `attributes_to_return` | *Optional[str]* | :heavy_minus_sign: | Comma separated list of Message Attribute names to return | attr1,attr2 | +| `max_batch_size` | *Optional[int]* | :heavy_minus_sign: | Max amount of messages to get in one batch (10 max) | 5 | +| `max_wait_time` | *Optional[int]* | :heavy_minus_sign: | Max amount of time in seconds to wait for messages in a single poll (20 max) | 5 | +| `region` | [Optional[models.SourceAmazonSqsAWSRegion]](../models/sourceamazonsqsawsregion.md) | :heavy_minus_sign: | AWS Region of the SQS Queue | | +| `source_type` | [models.AmazonSqs](../models/amazonsqs.md) | :heavy_check_mark: | N/A | | +| `target` | [Optional[models.TheTargetedActionResourceForTheFetch]](../models/thetargetedactionresourceforthefetch.md) | :heavy_minus_sign: | Note - Different targets have different attribute enum requirements; please refer to the Actions sections in https://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/Welcome.html | | +| `visibility_timeout` | *Optional[int]* | :heavy_minus_sign: | Modify the Visibility Timeout of the individual message from the Queue's default (seconds). | 20 |
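For reference, a corresponding model instance might look like the sketch below; the field names come from the table above, while the exact `SourceAmazonSqsAWSRegion` member name is an assumption to verify against the generated enum.

```python
# Sketch: poll an SQS queue with long polling and the maximum batch size.
from airbyte_api import models

sqs = models.SourceAmazonSqs(
    access_key="xxxxxHRNxxx3TBxxxxxx",  # IAM access key ID
    secret_key="hu+qE5e...",            # IAM secret key
    queue_url="https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue",
    region=models.SourceAmazonSqsAWSRegion.EU_WEST_1,  # member name assumed
    max_batch_size=10,      # API maximum: 10 messages per batch
    max_wait_time=20,       # API maximum: 20-second long poll
    visibility_timeout=30,  # hide in-flight messages for 30 seconds
)
```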
\ No newline at end of file diff --git a/docs/models/sourceamplitude.md index 681e22fc..2085ec20 100644 --- a/docs/models/sourceamplitude.md +++ b/docs/models/sourceamplitude.md @@ -3,12 +3,12 @@ ## Fields -| Field | Type | Required | Description | Example | -| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `api_key` | *str* | :heavy_check_mark: | Amplitude API Key. See the setup guide for more information on how to obtain this key. | | -| `secret_key` | *str* | :heavy_check_mark: | Amplitude Secret Key. See the setup guide for more information on how to obtain this key. | | -| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | UTC date and time in the format 2021-01-25T00:00:00Z. Any data before this date will not be replicated. | 2021-01-25T00:00:00Z | -| `active_users_group_by_country` | *Optional[bool]* | :heavy_minus_sign: | According to Considerations the grouping by `Country` is optional, if you're facing issues fetching the stream, or checking the connection please set this to `False` instead. | | -| `data_region` | [Optional[models.DataRegion]](../models/dataregion.md) | :heavy_minus_sign: | Amplitude data region server | | -| `request_time_range` | *Optional[int]* | :heavy_minus_sign: | According to Considerations too big time range in request can cause a timeout error. In this case, set shorter time interval in hours. 
| | -| `source_type` | [models.Amplitude](../models/amplitude.md) | :heavy_check_mark: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Amplitude API Key. See the setup guide for more information on how to obtain this key. | | +| `secret_key` | *str* | :heavy_check_mark: | Amplitude Secret Key. See the setup guide for more information on how to obtain this key. | | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | UTC date and time in the format 2021-01-25T00:00:00Z. Any data before this date will not be replicated. | 2021-01-25T00:00:00Z | +| `active_users_group_by_country` | *Optional[bool]* | :heavy_minus_sign: | According to Amplitude documentation, grouping by `Country` is optional. If you face issues fetching the stream or checking the connection, please set this field to `False`.
| | +| `data_region` | [Optional[models.DataRegion]](../models/dataregion.md) | :heavy_minus_sign: | Amplitude data region server | | +| `request_time_range` | *Optional[int]* | :heavy_minus_sign: | According to the Considerations section, too large a time range in the request can cause a timeout error. In this case, please provide a shorter time interval in hours.
| | +| `source_type` | [models.Amplitude](../models/amplitude.md) | :heavy_check_mark: | N/A | |
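A short sketch of how that timeout advice translates into configuration follows; the values are placeholders, and `start_date` is passed as a `datetime.date` per the field table (verify the accepted type against the generated model).

```python
# Sketch: narrow the per-request window to avoid Amplitude timeouts.
import datetime

from airbyte_api import models

amplitude = models.SourceAmplitude(
    api_key="...",
    secret_key="...",
    start_date=datetime.date(2021, 1, 25),
    request_time_range=12,  # request data in 12-hour slices if syncs time out
)
```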
\ No newline at end of file diff --git a/docs/models/sourceapplesearchads.md index 989620d4..85c81776 100644 --- a/docs/models/sourceapplesearchads.md +++ b/docs/models/sourceapplesearchads.md @@ -9,5 +9,9 @@ | `client_secret` | *str* | :heavy_check_mark: | A string that authenticates the user’s setup request. See here | | | `org_id` | *int* | :heavy_check_mark: | The identifier of the organization that owns the campaign. Your Org Id is the same as your account in the Apple Search Ads UI. | | | `start_date` | *str* | :heavy_check_mark: | Start getting data from that date. | 2020-01-01 | +| `backoff_factor` | *Optional[int]* | :heavy_minus_sign: | This factor determines the delay increase between retryable failures. Valid values are integers between 1 and 20. | 10 | | `end_date` | *Optional[str]* | :heavy_minus_sign: | Data is retrieved until that date (included) | 2021-01-01 | -| `source_type` | [models.AppleSearchAds](../models/applesearchads.md) | :heavy_check_mark: | N/A | | \ No newline at end of file +| `lookback_window` | *Optional[int]* | :heavy_minus_sign: | Apple Search Ads uses a 30-day attribution window. However, you may consider smaller values in order to shorten sync durations, at the cost of missing late data attributions. | 7 | +| `source_type` | [models.AppleSearchAds](../models/applesearchads.md) | :heavy_check_mark: | N/A | | +| `timezone` | [Optional[models.TimeZone]](../models/timezone.md) | :heavy_minus_sign: | The timezone for the reporting data. Use 'ORTZ' for Organization Time Zone or 'UTC' for Coordinated Universal Time. Default is UTC. | | +| `token_refresh_endpoint` | *Optional[str]* | :heavy_minus_sign: | Token Refresh Endpoint. Override the default value in scenarios where requests to Apple's token endpoint must be proxied | |
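As an illustration of the lookback trade-off, consider the hedged sketch below; `client_id` is assumed from the rows of this table that fall outside the hunk, and all values are placeholders.

```python
# Sketch: trade a shorter lookback window for faster syncs, accepting that
# attributions landing after day 7 may be missed.
from airbyte_api import models

apple_ads = models.SourceAppleSearchAds(
    client_id="...",      # assumed field; not visible in this hunk
    client_secret="...",
    org_id=123456,
    start_date="2020-01-01",
    lookback_window=7,    # vs. the full 30-day attribution window
    backoff_factor=10,    # delay growth between retryable failures (1-20)
)
```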
\ No newline at end of file diff --git a/docs/models/sourceappsflyer.md new file mode 100644 index 00000000..9e9454ae --- /dev/null +++ b/docs/models/sourceappsflyer.md @@ -0,0 +1,12 @@ +# SourceAppsflyer + + +## Fields + +| Field | Type | Required | Description | Example | +| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `api_token` | *str* | :heavy_check_mark: | Pull API token for authentication. If you change the account admin, the token changes, and you must update scripts with the new token. Get the API token in the Dashboard. | | +| `app_id` | *str* | :heavy_check_mark: | App identifier as found in AppsFlyer. | | +| `start_date` | *str* | :heavy_check_mark: | The default value to use if no bookmark exists for an endpoint. Raw Reports historical lookback is limited to 90 days. | 2021-11-16 | +| `source_type` | [models.Appsflyer](../models/appsflyer.md) | :heavy_check_mark: | N/A | | +| `timezone` | *Optional[str]* | :heavy_minus_sign: | Time zone in which date times are stored. The project timezone may be found in the App settings in the AppsFlyer console. 
| US/Pacific | \ No newline at end of file diff --git a/docs/models/sourceasana.md index f59bae67..190a2f1a 100644 --- a/docs/models/sourceasana.md +++ b/docs/models/sourceasana.md @@ -3,8 +3,9 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -| `credentials` | [Optional[models.AuthenticationMechanism]](../models/authenticationmechanism.md) | :heavy_minus_sign: | Choose how to authenticate to Github | -| `organization_export_ids` | List[*Any*] | :heavy_minus_sign: | Globally unique identifiers for the organization exports | -| `source_type` | [Optional[models.SourceAsanaAsana]](../models/sourceasanaasana.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | Example | +| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `credentials` | [Optional[models.AuthenticationMechanism]](../models/authenticationmechanism.md) | :heavy_minus_sign: | Choose how to authenticate to Asana | | +| `num_workers` | *Optional[int]* | :heavy_minus_sign: | The number of worker threads to use for the sync. The performance upper boundary is based on the limit of your Asana pricing plan. More info about the rate limit tiers can be found on Asana's API docs. 
| 1 | +| `organization_export_ids` | List[*Any*] | :heavy_minus_sign: | Globally unique identifiers for the organization exports | | +| `source_type` | [Optional[models.SourceAsanaAsana]](../models/sourceasanaasana.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/sourceassemblyai.md b/docs/models/sourceassemblyai.md new file mode 100644 index 00000000..ecb226fc --- /dev/null +++ b/docs/models/sourceassemblyai.md @@ -0,0 +1,12 @@ +# SourceAssemblyai + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Your AssemblyAI API key. You can find it in the AssemblyAI dashboard at https://www.assemblyai.com/app/api-keys. | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `request_id` | *Optional[str]* | :heavy_minus_sign: | The request ID for LeMur responses | +| `source_type` | [models.Assemblyai](../models/assemblyai.md) | :heavy_check_mark: | N/A | +| `subtitle_format` | [Optional[models.SubtitleFormat]](../models/subtitleformat.md) | :heavy_minus_sign: | The subtitle format for transcript_subtitle stream | \ No newline at end of file diff --git a/docs/models/sourceauth0schemasauthenticationmethod.md b/docs/models/sourceauth0schemasauthenticationmethod.md index 0a05b614..140b0638 100644 --- a/docs/models/sourceauth0schemasauthenticationmethod.md +++ b/docs/models/sourceauth0schemasauthenticationmethod.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| --------------------------------- | --------------------------------- | -| `OAUTH2_CONFIDENTIAL_APPLICATION` | oauth2_confidential_application | \ No newline at end of file +| Name | Value | +| --------------------- | --------------------- | +| `OAUTH2_ACCESS_TOKEN` | oauth2_access_token | \ No newline at end of file diff --git a/docs/models/sourceauth0schemascredentialsauthenticationmethod.md b/docs/models/sourceauth0schemascredentialsauthenticationmethod.md index dc393997..f962c1dc 100644 --- a/docs/models/sourceauth0schemascredentialsauthenticationmethod.md +++ b/docs/models/sourceauth0schemascredentialsauthenticationmethod.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| --------------------- | --------------------- | -| `OAUTH2_ACCESS_TOKEN` | oauth2_access_token | \ No newline at end of file +| Name | Value | +| --------------------------------- | --------------------------------- | +| `OAUTH2_CONFIDENTIAL_APPLICATION` | oauth2_confidential_application | \ No newline at end of file diff --git a/docs/models/sourceaviationstack.md b/docs/models/sourceaviationstack.md new file mode 100644 index 00000000..bfd6e797 --- /dev/null +++ b/docs/models/sourceaviationstack.md @@ -0,0 +1,10 @@ +# SourceAviationstack + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------- | 
------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `access_key` | *str* | :heavy_check_mark: | Your unique API key for authenticating with the Aviation API. You can find it in your Aviation account dashboard at https://aviationstack.com/dashboard | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.Aviationstack](../models/aviationstack.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceawinadvertiser.md b/docs/models/sourceawinadvertiser.md new file mode 100644 index 00000000..b7496482 --- /dev/null +++ b/docs/models/sourceawinadvertiser.md @@ -0,0 +1,13 @@ +# SourceAwinAdvertiser + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `advertiser_id` | *str* | :heavy_check_mark: | Your Awin Advertiser ID. You can find this in your Awin dashboard or account settings. | +| `api_key` | *str* | :heavy_check_mark: | Your Awin API key. Generate this from your Awin account under API Credentials. | +| `lookback_days` | *int* | :heavy_check_mark: | Number of days to look back on each sync to catch any updates to existing records. | +| `start_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_check_mark: | Start date for data replication in YYYY-MM-DD format | +| `source_type` | [models.AwinAdvertiser](../models/awinadvertiser.md) | :heavy_check_mark: | N/A | +| `step_increment` | *Optional[str]* | :heavy_minus_sign: | The time window size for each API request in ISO8601 duration format.
For the campaign performance stream, Awin API explicitly limits the period between startDate and endDate to 400 days maximum.
| \ No newline at end of file diff --git a/docs/models/sourceazureblobstorageauthentication.md index a3363d18..a90949c7 100644 --- a/docs/models/sourceazureblobstorageauthentication.md +++ b/docs/models/sourceazureblobstorageauthentication.md @@ -11,6 +11,12 @@ Credentials for connecting to the Azure Blob Storage value: models.AuthenticateViaOauth2 = /* values here */ ``` +### `models.AuthenticateViaClientCredentials` + +```python +value: models.AuthenticateViaClientCredentials = /* values here */ +``` + ### `models.AuthenticateViaStorageAccountKey` ```python diff --git a/docs/models/sourceazureblobstorageschemasauthtype.md index a6176c71..c5a536f9 100644 --- a/docs/models/sourceazureblobstorageschemasauthtype.md +++ b/docs/models/sourceazureblobstorageschemasauthtype.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| --------------------- | --------------------- | -| `STORAGE_ACCOUNT_KEY` | storage_account_key | \ No newline at end of file +| Name | Value | +| -------------------- | -------------------- | +| `CLIENT_CREDENTIALS` | client_credentials | \ No newline at end of file diff --git a/docs/models/sourceazureblobstorageschemascredentialsauthtype.md new file mode 100644 index 00000000..1b337071 --- /dev/null +++ b/docs/models/sourceazureblobstorageschemascredentialsauthtype.md @@ -0,0 +1,8 @@ +# SourceAzureBlobStorageSchemasCredentialsAuthType + + +## Values + +| Name | Value | +| --------------------- | --------------------- | +| `STORAGE_ACCOUNT_KEY` | storage_account_key | \ No newline at end of file diff --git a/docs/models/sourcebabelforce.md new file mode 100644 index 00000000..18396d44 --- /dev/null +++ b/docs/models/sourcebabelforce.md @@ -0,0 +1,13 @@ +# SourceBabelforce + + +## Fields + +| Field | Type | Required | Description | Example | +| --------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- | +| `access_key_id` | *str* | :heavy_check_mark: | The Babelforce access key ID | | +| `access_token` | *str* | :heavy_check_mark: | The Babelforce access token | | +| `date_created_from` | *Optional[int]* | :heavy_minus_sign: | Unix timestamp from which replication from the Babelforce API will start. For example 1651363200, which corresponds to 2022-05-01 00:00:00. | 1651363200 | +| `date_created_to` | *Optional[int]* | :heavy_minus_sign: | Unix timestamp up to which replication from Babelforce will run. For example 1651363200, which corresponds to 2022-05-01 00:00:00. | 1651363200 | +| `region` | [Optional[models.SourceBabelforceRegion]](../models/sourcebabelforceregion.md) | :heavy_minus_sign: | Babelforce region | | +| `source_type` | [models.Babelforce](../models/babelforce.md) | :heavy_check_mark: | N/A | |
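Since `date_created_from` and `date_created_to` take Unix timestamps, a standard-library helper can derive them from calendar dates; this is plain Python, independent of the SDK.

```python
# Convert a UTC calendar date to the Unix timestamp Babelforce expects.
import datetime

def utc_unix(year: int, month: int, day: int) -> int:
    dt = datetime.datetime(year, month, day, tzinfo=datetime.timezone.utc)
    return int(dt.timestamp())

print(utc_unix(2022, 5, 1))  # 1651363200, i.e. 2022-05-01 00:00:00 UTC
```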
\ No newline at end of file diff --git a/docs/models/sourcebabelforceregion.md new file mode 100644 index 00000000..06ecd743 --- /dev/null +++ b/docs/models/sourcebabelforceregion.md @@ -0,0 +1,12 @@ +# SourceBabelforceRegion + +Babelforce region + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `SERVICES` | services | +| `US_EAST` | us-east | +| `AP_SOUTHEAST` | ap-southeast | \ No newline at end of file diff --git a/docs/models/sourcebamboohr.md index 151aee3d..6ef4cac2 100644 --- a/docs/models/sourcebamboohr.md +++ b/docs/models/sourcebamboohr.md @@ -9,5 +9,6 @@ | `subdomain` | *str* | :heavy_check_mark: | Sub Domain of bamboo hr | | `custom_reports_fields` | *Optional[str]* | :heavy_minus_sign: | Comma-separated list of fields to include in custom reports. | | `custom_reports_include_default_fields` | *Optional[bool]* | :heavy_minus_sign: | If true, the custom reports endpoint will include the default fields defined here: https://documentation.bamboohr.com/docs/list-of-field-names. | +| `employee_fields` | *Optional[str]* | :heavy_minus_sign: | Comma-separated list of fields to include for employees. | | `source_type` | [models.BambooHr](../models/bamboohr.md) | :heavy_check_mark: | N/A | | `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcebluetally.md new file mode 100644 index 00000000..f244960b --- /dev/null +++ b/docs/models/sourcebluetally.md @@ -0,0 +1,10 @@ +# SourceBluetally + + +## Fields + +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Your API key to authenticate with the BlueTally API. You can generate it by navigating to your account settings, selecting 'API Keys', and clicking 'Create API Key'. 
| +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.Bluetally](../models/bluetally.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceboldsign.md new file mode 100644 index 00000000..d90c531c --- /dev/null +++ b/docs/models/sourceboldsign.md @@ -0,0 +1,10 @@ +# SourceBoldsign + + +## Fields + +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Your BoldSign API key. You can generate it by navigating to the API menu in the BoldSign app, selecting 'API Key', and clicking 'Generate API Key'. Copy the generated key and paste it here. | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.Boldsign](../models/boldsign.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcebreezometer.md new file mode 100644 index 00000000..7abcf4ef --- /dev/null +++ b/docs/models/sourcebreezometer.md @@ -0,0 +1,15 @@ +# SourceBreezometer + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Your API Access Key. See here. | | +| `latitude` | *str* | :heavy_check_mark: | Latitude of the monitored location. | 54.675003 | +| `longitude` | *str* | :heavy_check_mark: | Longitude of the monitored location. | -113.550282 | +| `days_to_forecast` | *Optional[int]* | :heavy_minus_sign: | Number of days to forecast. Minimum 1, maximum 3. Valid for Pollen and Weather Forecast streams. | 3 | +| `historic_hours` | *Optional[int]* | :heavy_minus_sign: | Number of hours to retrieve from the Air Quality History stream. Minimum 1, maximum 720. | 30 | +| `hours_to_forecast` | *Optional[int]* | :heavy_minus_sign: | Number of hours to forecast. Minimum 1, maximum 96. Valid for Air Quality Forecast stream. | 30 | +| `radius` | *Optional[int]* | :heavy_minus_sign: | Desired radius from the location provided. Minimum 5, maximum 100. Valid for Wildfires streams. 
| 50 | +| `source_type` | [models.Breezometer](../models/breezometer.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourcecalendly.md b/docs/models/sourcecalendly.md index ea214ac9..096bbf47 100644 --- a/docs/models/sourcecalendly.md +++ b/docs/models/sourcecalendly.md @@ -3,8 +3,9 @@ ## Fields -| Field | Type | Required | Description | -| --------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------- | -| `api_key` | *str* | :heavy_check_mark: | Go to Integrations → API & Webhooks to obtain your bearer token. https://calendly.com/integrations/api_webhooks | -| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | -| `source_type` | [models.Calendly](../models/calendly.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Go to Integrations → API & Webhooks to obtain your bearer token. https://calendly.com/integrations/api_webhooks | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `lookback_days` | *Optional[float]* | :heavy_minus_sign: | Number of days to be subtracted from the last cutoff date before starting to sync the `scheduled_events` stream. | +| `source_type` | [models.Calendly](../models/calendly.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcecaptaindata.md b/docs/models/sourcecaptaindata.md new file mode 100644 index 00000000..41730d2f --- /dev/null +++ b/docs/models/sourcecaptaindata.md @@ -0,0 +1,10 @@ +# SourceCaptainData + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------- | ---------------------------------------------- | ---------------------------------------------- | ---------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Your Captain Data project API key. | +| `project_uid` | *str* | :heavy_check_mark: | Your Captain Data project uuid. 
| +| `source_type` | [models.CaptainData](../models/captaindata.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcechurnkey.md new file mode 100644 index 00000000..b91d06ea --- /dev/null +++ b/docs/models/sourcechurnkey.md @@ -0,0 +1,10 @@ +# SourceChurnkey + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `x_ck_app` | *str* | :heavy_check_mark: | N/A | +| `source_type` | [models.Churnkey](../models/churnkey.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcecircleci.md index 9022f56d..9a26a2b3 100644 --- a/docs/models/sourcecircleci.md +++ b/docs/models/sourcecircleci.md @@ -3,14 +3,12 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -| `api_key` | *str* | :heavy_check_mark: | N/A | -| `org_id` | *str* | :heavy_check_mark: | The org ID found in `https://app.circleci.com/settings/organization/circleci/xxxxx/overview` | -| `project_id` | *str* | :heavy_check_mark: | Project ID found in the project settings | -| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | -| `job_id` | *Optional[str]* | :heavy_minus_sign: | Job ID for fetching information | -| `job_number` | *Optional[str]* | :heavy_minus_sign: | Job Number of the workflow | -| `source_type` | [models.Circleci](../models/circleci.md) | :heavy_check_mark: | N/A | -| `workflow_id` | *Optional[str]* | :heavy_minus_sign: | workflow ID of a project pipeline | -| `workflow_name` | *Optional[str]* | :heavy_minus_sign: | Workflow name for fetching information | \ No newline at end of file +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `org_id` | *str* | :heavy_check_mark: | The org ID found in `https://app.circleci.com/settings/organization/circleci/xxxxx/overview` | +| `project_id` | *str* | :heavy_check_mark: | Project ID found in the project settings. Visit `https://app.circleci.com/settings/project/circleci/ORG_SLUG/YYYYY` | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | 
+| `job_number` | *Optional[str]* | :heavy_minus_sign: | Job number of the workflow for the `jobs` stream. Auto-fetched from the `workflow_jobs` stream if not configured | +| `source_type` | [models.Circleci](../models/circleci.md) | :heavy_check_mark: | N/A | +| `workflow_id` | List[*Any*] | :heavy_minus_sign: | Workflow ID of a project pipeline. It can be seen in the URL of a pipeline build, e.g. `https://app.circleci.com/pipelines/circleci/55555xxxxxx/7yyyyyyyyxxxxx/2/workflows/WORKFLOW_ID` | \ No newline at end of file diff --git a/docs/models/sourceciscomeraki.md new file mode 100644 index 00000000..49d4f8d6 --- /dev/null +++ b/docs/models/sourceciscomeraki.md @@ -0,0 +1,10 @@ +# SourceCiscoMeraki + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Your Meraki API key. Obtain it by logging into your Meraki Dashboard at https://dashboard.meraki.com/, navigating to 'My Profile' via the avatar icon in the top right corner, and generating the API key. Save this key securely as it represents your admin credentials. | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.CiscoMeraki](../models/ciscomeraki.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcecoingeckocoins.md new file mode 100644 index 00000000..bda4cb8b --- /dev/null +++ b/docs/models/sourcecoingeckocoins.md @@ -0,0 +1,14 @@ +# SourceCoingeckoCoins + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | +| `coin_id` | *str* | :heavy_check_mark: | CoinGecko coin ID (e.g. bitcoin). Can be retrieved from the
`/coins/list` endpoint.
| +| `start_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_check_mark: | The start date for the historical data stream in dd-mm-yyyy format.
| +| `vs_currency` | *str* | :heavy_check_mark: | The target currency of market data (e.g. usd, eur, jpy, etc.)
| +| `api_key` | *Optional[str]* | :heavy_minus_sign: | API Key (for pro users) | +| `days` | [Optional[models.Days]](../models/days.md) | :heavy_minus_sign: | The number of days of data for the market chart.
| +| `end_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_minus_sign: | The end date for the historical data stream in dd-mm-yyyy format.
| +| `source_type` | [models.CoingeckoCoins](../models/coingeckocoins.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceconfiguration.md b/docs/models/sourceconfiguration.md index ba1a541f..9ba9b679 100644 --- a/docs/models/sourceconfiguration.md +++ b/docs/models/sourceconfiguration.md @@ -11,6 +11,12 @@ The values required to configure the source. value: models.SourceAha = /* values here */ ``` +### `models.Source100ms` + +```python +value: models.Source100ms = /* values here */ +``` + ### `models.Source7shifts` ```python @@ -59,6 +65,18 @@ value: models.SourceAkeneo = /* values here */ value: models.SourceAlgolia = /* values here */ ``` +### `models.SourceAlpacaBrokerAPI` + +```python +value: models.SourceAlpacaBrokerAPI = /* values here */ +``` + +### `models.SourceAlphaVantage` + +```python +value: models.SourceAlphaVantage = /* values here */ +``` + ### `models.SourceAmazonAds` ```python @@ -113,6 +131,12 @@ value: models.SourceAppfollow = /* values here */ value: models.SourceAppleSearchAds = /* values here */ ``` +### `models.SourceAppsflyer` + +```python +value: models.SourceAppsflyer = /* values here */ +``` + ### `models.SourceApptivo` ```python @@ -131,12 +155,30 @@ value: models.SourceAsana = /* values here */ value: models.SourceAshby = /* values here */ ``` +### `models.SourceAssemblyai` + +```python +value: models.SourceAssemblyai = /* values here */ +``` + ### `models.SourceAuth0` ```python value: models.SourceAuth0 = /* values here */ ``` +### `models.SourceAviationstack` + +```python +value: models.SourceAviationstack = /* values here */ +``` + +### `models.SourceAwinAdvertiser` + +```python +value: models.SourceAwinAdvertiser = /* values here */ +``` + ### `models.SourceAwsCloudtrail` ```python @@ -155,6 +197,12 @@ value: models.SourceAzureBlobStorage = /* values here */ value: models.SourceAzureTable = /* values here */ ``` +### `models.SourceBabelforce` + +```python +value: models.SourceBabelforce = /* values here */ +``` + ### `models.SourceBambooHr` ```python @@ -203,6 +251,18 @@ value: models.SourceBitly = /* values here */ value: models.SourceBlogger = /* values here */ ``` +### `models.SourceBluetally` + +```python +value: models.SourceBluetally = /* values here */ +``` + +### `models.SourceBoldsign` + +```python +value: models.SourceBoldsign = /* values here */ +``` + ### `models.SourceBox` ```python @@ -221,6 +281,12 @@ value: models.SourceBraintree = /* values here */ value: models.SourceBraze = /* values here */ ``` +### `models.SourceBreezometer` + +```python +value: models.SourceBreezometer = /* values here */ +``` + ### `models.SourceBreezyHr` ```python @@ -305,6 +371,12 @@ value: models.SourceCanny = /* values here */ value: models.SourceCapsuleCrm = /* values here */ ``` +### `models.SourceCaptainData` + +```python +value: models.SourceCaptainData = /* values here */ +``` + ### `models.SourceCareQualityCommission` ```python @@ -353,6 +425,12 @@ value: models.SourceChargify = /* values here */ value: models.SourceChartmogul = /* values here */ ``` +### `models.SourceChurnkey` + +```python +value: models.SourceChurnkey = /* values here */ +``` + ### `models.SourceCimis` ```python @@ -377,6 +455,12 @@ value: models.SourceCirca = /* values here */ value: models.SourceCircleci = /* values here */ ``` +### `models.SourceCiscoMeraki` + +```python +value: models.SourceCiscoMeraki = /* values here */ +``` + ### `models.SourceClarifAi` ```python @@ -449,6 +533,12 @@ value: models.SourceCodefresh = /* values here */ value: 
models.SourceCoinAPI = /* values here */ ``` +### `models.SourceCoingeckoCoins` + +```python +value: models.SourceCoingeckoCoins = /* values here */ +``` + ### `models.SourceCoinmarketcap` ```python @@ -491,6 +581,12 @@ value: models.SourceConvex = /* values here */ value: models.SourceCopper = /* values here */ ``` +### `models.SourceCouchbase` + +```python +value: models.SourceCouchbase = /* values here */ +``` + ### `models.SourceCountercyclical` ```python @@ -503,6 +599,12 @@ value: models.SourceCountercyclical = /* values here */ value: models.SourceCustomerIo = /* values here */ ``` +### `models.SourceCustomerly` + +```python +value: models.SourceCustomerly = /* values here */ +``` + ### `models.SourceDatadog` ```python @@ -533,6 +635,12 @@ value: models.SourceDelighted = /* values here */ value: models.SourceDeputy = /* values here */ ``` +### `models.SourceDingConnect` + +```python +value: models.SourceDingConnect = /* values here */ +``` + ### `models.SourceDixa` ```python @@ -545,6 +653,18 @@ value: models.SourceDixa = /* values here */ value: models.SourceDockerhub = /* values here */ ``` +### `models.SourceDocuseal` + +```python +value: models.SourceDocuseal = /* values here */ +``` + +### `models.SourceDolibarr` + +```python +value: models.SourceDolibarr = /* values here */ +``` + ### `models.SourceDremio` ```python @@ -569,6 +689,12 @@ value: models.SourceDrip = /* values here */ value: models.SourceDropboxSign = /* values here */ ``` +### `models.SourceDwolla` + +```python +value: models.SourceDwolla = /* values here */ +``` + ### `models.SourceDynamodb` ```python @@ -593,12 +719,30 @@ value: models.SourceEasypost = /* values here */ value: models.SourceEasypromos = /* values here */ ``` +### `models.SourceEbayFinance` + +```python +value: models.SourceEbayFinance = /* values here */ +``` + +### `models.SourceEbayFulfillment` + +```python +value: models.SourceEbayFulfillment = /* values here */ +``` + ### `models.SourceElasticemail` ```python value: models.SourceElasticemail = /* values here */ ``` +### `models.SourceElasticsearch` + +```python +value: models.SourceElasticsearch = /* values here */ +``` + ### `models.SourceEmailoctopus` ```python @@ -635,6 +779,12 @@ value: models.SourceEventee = /* values here */ value: models.SourceEventzilla = /* values here */ ``` +### `models.SourceEverhour` + +```python +value: models.SourceEverhour = /* values here */ +``` + ### `models.SourceExchangeRates` ```python @@ -653,6 +803,12 @@ value: models.SourceEzofficeinventory = /* values here */ value: models.SourceFacebookMarketing = /* values here */ ``` +### `models.SourceFacebookPages` + +```python +value: models.SourceFacebookPages = /* values here */ +``` + ### `models.SourceFactorial` ```python @@ -665,6 +821,18 @@ value: models.SourceFactorial = /* values here */ value: models.SourceFaker = /* values here */ ``` +### `models.SourceFastbill` + +```python +value: models.SourceFastbill = /* values here */ +``` + +### `models.SourceFastly` + +```python +value: models.SourceFastly = /* values here */ +``` + ### `models.SourceFauna` ```python @@ -815,6 +983,12 @@ value: models.SourceFront = /* values here */ value: models.SourceFulcrum = /* values here */ ``` +### `models.SourceFullstory` + +```python +value: models.SourceFullstory = /* values here */ +``` + ### `models.SourceGainsightPx` ```python @@ -839,6 +1013,12 @@ value: models.SourceGetgist = /* values here */ value: models.SourceGetlago = /* values here */ ``` +### `models.SourceGiphy` + +```python +value: models.SourceGiphy 
= /* values here */ +``` + ### `models.SourceGitbook` ```python @@ -887,6 +1067,12 @@ value: models.SourceGocardless = /* values here */ value: models.SourceGoldcast = /* values here */ ``` +### `models.SourceGologin` + +```python +value: models.SourceGologin = /* values here */ +``` + ### `models.SourceGong` ```python @@ -977,6 +1163,12 @@ value: models.SourceGorgias = /* values here */ value: models.SourceGreenhouse = /* values here */ ``` +### `models.SourceGreythr` + +```python +value: models.SourceGreythr = /* values here */ +``` + ### `models.SourceGridly` ```python @@ -1001,6 +1193,12 @@ value: models.SourceGutendex = /* values here */ value: models.SourceHardcodedRecords = /* values here */ ``` +### `models.SourceHarness` + +```python +value: models.SourceHarness = /* values here */ +``` + ### `models.SourceHarvest` ```python @@ -1013,6 +1211,18 @@ value: models.SourceHarvest = /* values here */ value: models.SourceHeight = /* values here */ ``` +### `models.SourceHellobaton` + +```python +value: models.SourceHellobaton = /* values here */ +``` + +### `models.SourceHelpScout` + +```python +value: models.SourceHelpScout = /* values here */ +``` + ### `models.SourceHibob` ```python @@ -1025,6 +1235,12 @@ value: models.SourceHibob = /* values here */ value: models.SourceHighLevel = /* values here */ ``` +### `models.SourceHoorayhr` + +```python +value: models.SourceHoorayhr = /* values here */ +``` + ### `models.SourceHubplanner` ```python @@ -1037,18 +1253,36 @@ value: models.SourceHubplanner = /* values here */ value: models.SourceHubspot = /* values here */ ``` +### `models.SourceHuggingFaceDatasets` + +```python +value: models.SourceHuggingFaceDatasets = /* values here */ +``` + ### `models.SourceHumanitix` ```python value: models.SourceHumanitix = /* values here */ ``` +### `models.SourceHuntr` + +```python +value: models.SourceHuntr = /* values here */ +``` + ### `models.SourceIlluminaBasespace` ```python value: models.SourceIlluminaBasespace = /* values here */ ``` +### `models.SourceImagga` + +```python +value: models.SourceImagga = /* values here */ +``` + ### `models.SourceIncidentIo` ```python @@ -1061,6 +1295,12 @@ value: models.SourceIncidentIo = /* values here */ value: models.SourceInflowinventory = /* values here */ ``` +### `models.SourceInsightful` + +```python +value: models.SourceInsightful = /* values here */ +``` + ### `models.SourceInsightly` ```python @@ -1085,6 +1325,12 @@ value: models.SourceInstatus = /* values here */ value: models.SourceIntercom = /* values here */ ``` +### `models.SourceIntruder` + +```python +value: models.SourceIntruder = /* values here */ +``` + ### `models.SourceInvoiced` ```python @@ -1109,6 +1355,12 @@ value: models.SourceIp2whois = /* values here */ value: models.SourceIterable = /* values here */ ``` +### `models.SourceJamfPro` + +```python +value: models.SourceJamfPro = /* values here */ +``` + ### `models.SourceJira` ```python @@ -1127,6 +1379,12 @@ value: models.SourceJobnimbus = /* values here */ value: models.SourceJotform = /* values here */ ``` +### `models.SourceJudgeMeReviews` + +```python +value: models.SourceJudgeMeReviews = /* values here */ +``` + ### `models.SourceJustSift` ```python @@ -1151,6 +1409,12 @@ value: models.SourceK6Cloud = /* values here */ value: models.SourceKatana = /* values here */ ``` +### `models.SourceKeka` + +```python +value: models.SourceKeka = /* values here */ +``` + ### `models.SourceKisi` ```python @@ -1223,6 +1487,12 @@ value: models.SourceLeverHiring = /* values here */ value: 
models.SourceLightspeedRetail = /* values here */ ``` +### `models.SourceLinear` + +```python +value: models.SourceLinear = /* values here */ +``` + ### `models.SourceLinkedinAds` ```python @@ -1277,6 +1547,12 @@ value: models.SourceMailchimp = /* values here */ value: models.SourceMailerlite = /* values here */ ``` +### `models.SourceMailersend` + +```python +value: models.SourceMailersend = /* values here */ +``` + ### `models.SourceMailgun` ```python @@ -1319,12 +1595,30 @@ value: models.SourceMarketo = /* values here */ value: models.SourceMarketstack = /* values here */ ``` +### `models.SourceMendeley` + +```python +value: models.SourceMendeley = /* values here */ +``` + ### `models.SourceMention` ```python value: models.SourceMention = /* values here */ ``` +### `models.SourceMercadoAds` + +```python +value: models.SourceMercadoAds = /* values here */ +``` + +### `models.SourceMerge` + +```python +value: models.SourceMerge = /* values here */ +``` + ### `models.SourceMetabase` ```python @@ -1445,24 +1739,60 @@ value: models.SourceN8n = /* values here */ value: models.SourceNasa = /* values here */ ``` +### `models.SourceNavan` + +```python +value: models.SourceNavan = /* values here */ +``` + +### `models.SourceNebiusAi` + +```python +value: models.SourceNebiusAi = /* values here */ +``` + ### `models.SourceNetsuite` ```python value: models.SourceNetsuite = /* values here */ ``` +### `models.SourceNetsuiteEnterprise` + +```python +value: models.SourceNetsuiteEnterprise = /* values here */ +``` + ### `models.SourceNewsAPI` ```python value: models.SourceNewsAPI = /* values here */ ``` +### `models.SourceNewsdata` + +```python +value: models.SourceNewsdata = /* values here */ +``` + ### `models.SourceNewsdataIo` ```python value: models.SourceNewsdataIo = /* values here */ ``` +### `models.SourceNexiopay` + +```python +value: models.SourceNexiopay = /* values here */ +``` + +### `models.SourceNinjaoneRmm` + +```python +value: models.SourceNinjaoneRmm = /* values here */ +``` + ### `models.SourceNocrm` ```python @@ -1541,6 +1871,12 @@ value: models.SourceOnfleet = /* values here */ value: models.SourceOpenDataDc = /* values here */ ``` +### `models.SourceOpenExchangeRates` + +```python +value: models.SourceOpenExchangeRates = /* values here */ +``` + ### `models.SourceOpenaq` ```python @@ -1571,22 +1907,28 @@ value: models.SourceOpinionStage = /* values here */ value: models.SourceOpsgenie = /* values here */ ``` +### `models.SourceOpuswatch` + +```python +value: models.SourceOpuswatch = /* values here */ +``` + ### `models.SourceOracle` ```python value: models.SourceOracle = /* values here */ ``` -### `models.SourceOrb` +### `models.SourceOracleEnterprise` ```python -value: models.SourceOrb = /* values here */ +value: models.SourceOracleEnterprise = /* values here */ ``` -### `models.SourceOrbit` +### `models.SourceOrb` ```python -value: models.SourceOrbit = /* values here */ +value: models.SourceOrb = /* values here */ ``` ### `models.SourceOura` @@ -1619,6 +1961,18 @@ value: models.SourceOveit = /* values here */ value: models.SourcePabblySubscriptionsBilling = /* values here */ ``` +### `models.SourcePaddle` + +```python +value: models.SourcePaddle = /* values here */ +``` + +### `models.SourcePagerduty` + +```python +value: models.SourcePagerduty = /* values here */ +``` + ### `models.SourcePandadoc` ```python @@ -1643,6 +1997,24 @@ value: models.SourcePapersign = /* values here */ value: models.SourcePardot = /* values here */ ``` +### `models.SourcePartnerize` + +```python +value: 
models.SourcePartnerize = /* values here */ +``` + +### `models.SourcePartnerstack` + +```python +value: models.SourcePartnerstack = /* values here */ +``` + +### `models.SourcePayfit` + +```python +value: models.SourcePayfit = /* values here */ +``` + ### `models.SourcePaypalTransaction` ```python @@ -1667,6 +2039,12 @@ value: models.SourcePendo = /* values here */ value: models.SourcePennylane = /* values here */ ``` +### `models.SourcePerigon` + +```python +value: models.SourcePerigon = /* values here */ +``` + ### `models.SourcePersistiq` ```python @@ -1685,12 +2063,24 @@ value: models.SourcePersona = /* values here */ value: models.SourcePexelsAPI = /* values here */ ``` +### `models.SourcePhyllo` + +```python +value: models.SourcePhyllo = /* values here */ +``` + ### `models.SourcePicqer` ```python value: models.SourcePicqer = /* values here */ ``` +### `models.SourcePingdom` + +```python +value: models.SourcePingdom = /* values here */ +``` + ### `models.SourcePinterest` ```python @@ -1757,6 +2147,12 @@ value: models.SourcePokeapi = /* values here */ value: models.SourcePolygonStockAPI = /* values here */ ``` +### `models.SourcePoplar` + +```python +value: models.SourcePoplar = /* values here */ +``` + ### `models.SourcePostgres` ```python @@ -1793,6 +2189,12 @@ value: models.SourcePretix = /* values here */ value: models.SourcePrimetric = /* values here */ ``` +### `models.SourcePrintify` + +```python +value: models.SourcePrintify = /* values here */ +``` + ### `models.SourceProductboard` ```python @@ -1895,6 +2297,12 @@ value: models.SourceRepairshopr = /* values here */ value: models.SourceReplyIo = /* values here */ ``` +### `models.SourceRetailexpressByMaropost` + +```python +value: models.SourceRetailexpressByMaropost = /* values here */ +``` + ### `models.SourceRetently` ```python @@ -1913,12 +2321,24 @@ value: models.SourceRevenuecat = /* values here */ value: models.SourceRevolutMerchant = /* values here */ ``` +### `models.SourceRingcentral` + +```python +value: models.SourceRingcentral = /* values here */ +``` + ### `models.SourceRkiCovid` ```python value: models.SourceRkiCovid = /* values here */ ``` +### `models.SourceRocketChat` + +```python +value: models.SourceRocketChat = /* values here */ +``` + ### `models.SourceRocketlane` ```python @@ -1991,6 +2411,12 @@ value: models.SourceSalesloft = /* values here */ value: models.SourceSapFieldglass = /* values here */ ``` +### `models.SourceSapHanaEnterprise` + +```python +value: models.SourceSapHanaEnterprise = /* values here */ +``` + ### `models.SourceSavvycal` ```python @@ -2051,6 +2477,18 @@ value: models.SourceSenseforce = /* values here */ value: models.SourceSentry = /* values here */ ``` +### `models.SourceSerpstat` + +```python +value: models.SourceSerpstat = /* values here */ +``` + +### `models.SourceServiceNow` + +```python +value: models.SourceServiceNow = /* values here */ +``` + ### `models.SourceSftp` ```python @@ -2063,6 +2501,12 @@ value: models.SourceSftp = /* values here */ value: models.SourceSftpBulk = /* values here */ ``` +### `models.SourceSharepointEnterprise` + +```python +value: models.SourceSharepointEnterprise = /* values here */ +``` + ### `models.SourceSharetribe` ```python @@ -2075,12 +2519,24 @@ value: models.SourceSharetribe = /* values here */ value: models.SourceShippo = /* values here */ ``` +### `models.SourceShipstation` + +```python +value: models.SourceShipstation = /* values here */ +``` + ### `models.SourceShopify` ```python value: models.SourceShopify = /* values here */ ``` 
+### `models.SourceShopwired` + +```python +value: models.SourceShopwired = /* values here */ +``` + ### `models.SourceShortcut` ```python @@ -2093,12 +2549,24 @@ value: models.SourceShortcut = /* values here */ value: models.SourceShortio = /* values here */ ``` +### `models.SourceShutterstock` + +```python +value: models.SourceShutterstock = /* values here */ +``` + ### `models.SourceSigmaComputing` ```python value: models.SourceSigmaComputing = /* values here */ ``` +### `models.SourceSignnow` + +```python +value: models.SourceSignnow = /* values here */ +``` + ### `models.SourceSimfin` ```python @@ -2195,6 +2663,12 @@ value: models.SourceSparkpost = /* values here */ value: models.SourceSplitIo = /* values here */ ``` +### `models.SourceSpotifyAds` + +```python +value: models.SourceSpotifyAds = /* values here */ +``` + ### `models.SourceSpotlercrm` ```python @@ -2261,6 +2735,12 @@ value: models.SourceSurveymonkey = /* values here */ value: models.SourceSurvicate = /* values here */ ``` +### `models.SourceSvix` + +```python +value: models.SourceSvix = /* values here */ +``` + ### `models.SourceSysteme` ```python @@ -2273,6 +2753,12 @@ value: models.SourceSysteme = /* values here */ value: models.SourceTaboola = /* values here */ ``` +### `models.SourceTavus` + +```python +value: models.SourceTavus = /* values here */ +``` + ### `models.SourceTeamtailor` ```python @@ -2309,6 +2795,18 @@ value: models.SourceTheGuardianAPI = /* values here */ value: models.SourceThinkific = /* values here */ ``` +### `models.SourceThinkificCourses` + +```python +value: models.SourceThinkificCourses = /* values here */ +``` + +### `models.SourceThriveLearning` + +```python +value: models.SourceThriveLearning = /* values here */ +``` + ### `models.SourceTicketmaster` ```python @@ -2339,12 +2837,24 @@ value: models.SourceTimely = /* values here */ value: models.SourceTinyemail = /* values here */ ``` +### `models.SourceTmdb` + +```python +value: models.SourceTmdb = /* values here */ +``` + ### `models.SourceTodoist` ```python value: models.SourceTodoist = /* values here */ ``` +### `models.SourceToggl` + +```python +value: models.SourceToggl = /* values here */ +``` + ### `models.SourceTrackPms` ```python @@ -2399,6 +2909,12 @@ value: models.SourceTwilioTaskrouter = /* values here */ value: models.SourceTwitter = /* values here */ ``` +### `models.SourceTyntecSms` + +```python +value: models.SourceTyntecSms = /* values here */ +``` + ### `models.SourceTypeform` ```python @@ -2423,6 +2939,12 @@ value: models.SourceUnleash = /* values here */ value: models.SourceUppromote = /* values here */ ``` +### `models.SourceUptick` + +```python +value: models.SourceUptick = /* values here */ +``` + ### `models.SourceUsCensus` ```python @@ -2483,6 +3005,12 @@ value: models.SourceWaiteraid = /* values here */ value: models.SourceWasabiStatsAPI = /* values here */ ``` +### `models.SourceWatchmode` + +```python +value: models.SourceWatchmode = /* values here */ +``` + ### `models.SourceWeatherstack` ```python @@ -2537,6 +3065,12 @@ value: models.SourceWordpress = /* values here */ value: models.SourceWorkable = /* values here */ ``` +### `models.SourceWorkday` + +```python +value: models.SourceWorkday = /* values here */ +``` + ### `models.SourceWorkflowmax` ```python @@ -2597,6 +3131,18 @@ value: models.SourceYotpo = /* values here */ value: models.SourceYouNeedABudgetYnab = /* values here */ ``` +### `models.SourceYounium` + +```python +value: models.SourceYounium = /* values here */ +``` + +### `models.SourceYousign` + 
+```python
+value: models.SourceYousign = /* values here */
+```
+
### `models.SourceYoutubeAnalytics`

```python
@@ -2615,6 +3161,12 @@ value: models.SourceYoutubeData = /* values here */
value: models.SourceZapierSupportedStorage = /* values here */
```

+### `models.SourceZapsign`
+
+```python
+value: models.SourceZapsign = /* values here */
+```
+
### `models.SourceZendeskChat`

```python
diff --git a/docs/models/sourceconvertkit.md b/docs/models/sourceconvertkit.md
index f8a123da..97eb4fd5 100644
--- a/docs/models/sourceconvertkit.md
+++ b/docs/models/sourceconvertkit.md
@@ -3,7 +3,8 @@
 ## Fields
 
-| Field | Type | Required | Description |
-| --- | --- | --- | --- |
-| `api_secret` | *str* | :heavy_check_mark: | API Secret |
-| `source_type` | [models.Convertkit](../models/convertkit.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `credentials` | [models.AuthenticationType](../models/authenticationtype.md) | :heavy_check_mark: | N/A |
+| `source_type` | [models.Convertkit](../models/convertkit.md) | :heavy_check_mark: | N/A |
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourceconvertkitauthtype.md b/docs/models/sourceconvertkitauthtype.md
new file mode 100644
index 00000000..28395085
--- /dev/null
+++ b/docs/models/sourceconvertkitauthtype.md
@@ -0,0 +1,8 @@
+# SourceConvertkitAuthType
+
+
+## Values
+
+| Name | Value |
+| ---------- | ---------- |
+| `OAUTH2_0` | oauth2.0 |
\ No newline at end of file
diff --git a/docs/models/sourceconvertkitoauth20.md b/docs/models/sourceconvertkitoauth20.md
new file mode 100644
index 00000000..964c8a1d
--- /dev/null
+++ b/docs/models/sourceconvertkitoauth20.md
@@ -0,0 +1,13 @@
+# SourceConvertkitOAuth20
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `client_id` | *str* | :heavy_check_mark: | The client ID of your OAuth application. |
+| `client_secret` | *str* | :heavy_check_mark: | The client secret of your OAuth application. |
+| `refresh_token` | *str* | :heavy_check_mark: | A current, non-expired refresh token generated using the provided client ID and secret. |
+| `access_token` | *Optional[str]* | :heavy_minus_sign: | An access token generated using the provided client information and refresh token. |
+| `auth_type` | [models.SourceConvertkitAuthType](../models/sourceconvertkitauthtype.md) | :heavy_check_mark: | N/A |
+| `expires_at` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The time at which the current access token is set to expire |
\ No newline at end of file
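The `credentials` union on `SourceConvertkit` now takes either the OAuth model above or the API-key variant that follows. A minimal sketch of wiring the OAuth variant together; the placeholder values are hypothetical, and the const discriminators (`auth_type`, `source_type`) are assumed to be pre-filled by the generated models:

```python
import datetime

from airbyte_api import models

# Placeholder credentials; auth_type/source_type are assumed to default
# to their const values in the generated models.
convertkit_config = models.SourceConvertkit(
    credentials=models.SourceConvertkitOAuth20(
        client_id="<oauth-client-id>",
        client_secret="<oauth-client-secret>",
        refresh_token="<current-refresh-token>",
    ),
    start_date=datetime.date(2024, 1, 1),
)
```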
diff --git a/docs/models/sourceconvertkitschemasauthtype.md b/docs/models/sourceconvertkitschemasauthtype.md
new file mode 100644
index 00000000..05534ad5
--- /dev/null
+++ b/docs/models/sourceconvertkitschemasauthtype.md
@@ -0,0 +1,8 @@
+# SourceConvertkitSchemasAuthType
+
+
+## Values
+
+| Name | Value |
+| --------- | --------- |
+| `API_KEY` | api_key |
\ No newline at end of file
diff --git a/docs/models/sourcecouchbase.md b/docs/models/sourcecouchbase.md
new file mode 100644
index 00000000..970e6b6c
--- /dev/null
+++ b/docs/models/sourcecouchbase.md
@@ -0,0 +1,13 @@
+# SourceCouchbase
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `bucket` | *str* | :heavy_check_mark: | The name of the bucket to sync data from |
+| `connection_string` | *str* | :heavy_check_mark: | The connection string for the Couchbase server (e.g., couchbase://localhost or couchbases://example.com) |
+| `password` | *str* | :heavy_check_mark: | The password to use for authentication |
+| `username` | *str* | :heavy_check_mark: | The username to use for authentication |
+| `source_type` | [models.Couchbase](../models/couchbase.md) | :heavy_check_mark: | N/A |
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date from which you'd like to replicate data for incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. If not set, all data will be replicated.
| \ No newline at end of file diff --git a/docs/models/sourcecreaterequest.md b/docs/models/sourcecreaterequest.md index ccc26c16..01d613fe 100644 --- a/docs/models/sourcecreaterequest.md +++ b/docs/models/sourcecreaterequest.md @@ -3,10 +3,11 @@ ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | -| `configuration` | [models.SourceConfiguration](../models/sourceconfiguration.md) | :heavy_check_mark: | The values required to configure the source. | {
"user": "charles"
} | -| `name` | *str* | :heavy_check_mark: | Name of the source e.g. dev-mysql-instance. | | -| `workspace_id` | *str* | :heavy_check_mark: | N/A | | -| `definition_id` | *Optional[str]* | :heavy_minus_sign: | The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. | | -| `secret_id` | *Optional[str]* | :heavy_minus_sign: | Optional secretID obtained through the OAuth redirect flow. | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `configuration` | [models.SourceConfiguration](../models/sourceconfiguration.md) | :heavy_check_mark: | The values required to configure the source. | {
"user": "charles"
} |
+| `name` | *str* | :heavy_check_mark: | Name of the source, e.g. dev-mysql-instance. | |
+| `workspace_id` | *str* | :heavy_check_mark: | N/A | |
+| `definition_id` | *Optional[str]* | :heavy_minus_sign: | The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. | |
+| `resource_allocation` | [Optional[models.ScopedResourceRequirements]](../models/scopedresourcerequirements.md) | :heavy_minus_sign: | Actor- or actor-definition-specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level. | |
+| `secret_id` | *Optional[str]* | :heavy_minus_sign: | Optional secretID obtained through the OAuth redirect flow. | |
\ No newline at end of file
diff --git a/docs/models/sourcecustomerio.md b/docs/models/sourcecustomerio.md
index befebe88..9b6261b7 100644
--- a/docs/models/sourcecustomerio.md
+++ b/docs/models/sourcecustomerio.md
@@ -3,7 +3,7 @@
 ## Fields
 
-| Field | Type | Required | Description |
-| --- | --- | --- | --- |
-| `app_api_key` | *str* | :heavy_check_mark: | N/A |
-| `source_type` | [models.CustomerIo](../models/customerio.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `app_api_key` | *str* | :heavy_check_mark: | N/A |
+| `source_type` | [models.SourceCustomerIoCustomerIo](../models/sourcecustomeriocustomerio.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourcecustomeriocustomerio.md b/docs/models/sourcecustomeriocustomerio.md
new file mode 100644
index 00000000..97c2bd75
--- /dev/null
+++ b/docs/models/sourcecustomeriocustomerio.md
@@ -0,0 +1,8 @@
+# SourceCustomerIoCustomerIo
+
+
+## Values
+
+| Name | Value |
+| ------------- | ------------- |
+| `CUSTOMER_IO` | customer-io |
\ No newline at end of file
diff --git a/docs/models/sourcecustomerly.md b/docs/models/sourcecustomerly.md
new file mode 100644
index 00000000..52e68dbe
--- /dev/null
+++ b/docs/models/sourcecustomerly.md
@@ -0,0 +1,9 @@
+# SourceCustomerly
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `api_key` | *str* | :heavy_check_mark: | N/A |
+| `source_type` | [models.Customerly](../models/customerly.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
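Stepping back to the `SourceCreateRequest` change above: `configuration` still takes exactly one member of the `SourceConfiguration` union listed earlier, alongside the new optional `resource_allocation`. A hedged sketch of the documented create-source call, assuming the SDK's `AirbyteAPI` entry point, a `bearer_auth` security scheme, and that `models.SourceFaker` constructs with defaults only:

```python
import airbyte_api
from airbyte_api import models

# Placeholder token and workspace UUID throughout.
sdk = airbyte_api.AirbyteAPI(
    security=models.Security(bearer_auth="<api-token>"),
)

res = sdk.sources.create_source(
    request=models.SourceCreateRequest(
        name="dev-faker-instance",
        workspace_id="<workspace-uuid>",
        # Exactly one variant of the SourceConfiguration union.
        configuration=models.SourceFaker(),
    ),
)
print(res.source_response)
```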
diff --git a/docs/models/sourcedingconnect.md b/docs/models/sourcedingconnect.md
new file mode 100644
index 00000000..13d302d5
--- /dev/null
+++ b/docs/models/sourcedingconnect.md
@@ -0,0 +1,11 @@
+# SourceDingConnect
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `api_key` | *str* | :heavy_check_mark: | Your API key for authenticating with the DingConnect API. You can generate this key by navigating to the Developer tab in the Account Settings section of your DingConnect account. |
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A |
+| `x_correlation_id` | *Optional[str]* | :heavy_minus_sign: | Optional header to correlate HTTP requests between a client and server. |
+| `source_type` | [models.DingConnect](../models/dingconnect.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/hivecataloguseapachehivemetastore.md b/docs/models/sourcedocuseal.md
similarity index 66%
rename from docs/models/hivecataloguseapachehivemetastore.md
rename to docs/models/sourcedocuseal.md
index 57204d38..5477c059 100644
--- a/docs/models/hivecataloguseapachehivemetastore.md
+++ b/docs/models/sourcedocuseal.md
@@ -1,10 +1,11 @@
-# HiveCatalogUseApacheHiveMetaStore
+# SourceDocuseal
 
 
 ## Fields
 
-| Field | Type | Required | Description | Example |
-| --- | --- | --- | --- | --- |
-| `hive_thrift_uri` | *str* | :heavy_check_mark: | Hive MetaStore thrift server uri of iceberg catalog. | host:port |
-| `catalog_type` | [Optional[models.CatalogType]](../models/catalogtype.md) | :heavy_minus_sign: | N/A | |
-| `database` | *Optional[str]* | :heavy_minus_sign: | The default database tables are written to if the source does not specify a namespace. The usual value for this field is "default".
| default |
\ No newline at end of file
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `api_key` | *str* | :heavy_check_mark: | Your API key for authenticating with the DocuSeal API. Obtain it from the DocuSeal API Console at https://console.docuseal.com/api. |
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A |
+| `limit` | *Optional[str]* | :heavy_minus_sign: | The pagination limit |
+| `source_type` | [models.Docuseal](../models/docuseal.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourcedolibarr.md b/docs/models/sourcedolibarr.md
new file mode 100644
index 00000000..601b2860
--- /dev/null
+++ b/docs/models/sourcedolibarr.md
@@ -0,0 +1,11 @@
+# SourceDolibarr
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `api_key` | *str* | :heavy_check_mark: | N/A |
+| `my_dolibarr_domain_url` | *str* | :heavy_check_mark: | Enter your "domain/dolibarr_url" without https://. Example: mydomain.com/dolibarr |
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A |
+| `source_type` | [models.Dolibarr](../models/dolibarr.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourcedwolla.md b/docs/models/sourcedwolla.md
new file mode 100644
index 00000000..ac37d755
--- /dev/null
+++ b/docs/models/sourcedwolla.md
@@ -0,0 +1,12 @@
+# SourceDwolla
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `client_id` | *str* | :heavy_check_mark: | N/A |
+| `client_secret` | *str* | :heavy_check_mark: | N/A |
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A |
+| `environment` | [Optional[models.SourceDwollaEnvironment]](../models/sourcedwollaenvironment.md) | :heavy_minus_sign: | The environment for the Dwolla API, either 'api-sandbox' or 'api'. |
+| `source_type` | [models.Dwolla](../models/dwolla.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
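The optional `environment` field selects one of the two `SourceDwollaEnvironment` values documented next. A small illustrative sketch with placeholder credentials, assuming the const `source_type` defaults in the generated model:

```python
import datetime

from airbyte_api import models

# Placeholder credentials; API_SANDBOX targets Dwolla's sandbox host.
dwolla_config = models.SourceDwolla(
    client_id="<dwolla-client-id>",
    client_secret="<dwolla-client-secret>",
    start_date=datetime.date(2024, 1, 1),
    environment=models.SourceDwollaEnvironment.API_SANDBOX,
)
```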
diff --git a/docs/models/sourcedwollaenvironment.md b/docs/models/sourcedwollaenvironment.md
new file mode 100644
index 00000000..10d737ae
--- /dev/null
+++ b/docs/models/sourcedwollaenvironment.md
@@ -0,0 +1,11 @@
+# SourceDwollaEnvironment
+
+The environment for the Dwolla API, either 'api-sandbox' or 'api'.
+
+
+## Values
+
+| Name | Value |
+| ------------- | ------------- |
+| `API` | api |
+| `API_SANDBOX` | api-sandbox |
\ No newline at end of file
diff --git a/docs/models/sourceebayfinance.md b/docs/models/sourceebayfinance.md
new file mode 100644
index 00000000..1d985fed
--- /dev/null
+++ b/docs/models/sourceebayfinance.md
@@ -0,0 +1,15 @@
+# SourceEbayFinance
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `redirect_uri` | *str* | :heavy_check_mark: | N/A |
+| `refresh_token` | *str* | :heavy_check_mark: | N/A |
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A |
+| `username` | *str* | :heavy_check_mark: | Ebay Developer Client ID |
+| `api_host` | [Optional[models.APIHost]](../models/apihost.md) | :heavy_minus_sign: | https://apiz.sandbox.ebay.com for sandbox & https://apiz.ebay.com for production |
+| `password` | *Optional[str]* | :heavy_minus_sign: | Ebay Client Secret |
+| `source_type` | [models.EbayFinance](../models/ebayfinance.md) | :heavy_check_mark: | N/A |
+| `token_refresh_endpoint` | [Optional[models.RefreshTokenEndpoint]](../models/refreshtokenendpoint.md) | :heavy_minus_sign: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourceebayfulfillment.md b/docs/models/sourceebayfulfillment.md
new file mode 100644
index 00000000..792bb12e
--- /dev/null
+++ b/docs/models/sourceebayfulfillment.md
@@ -0,0 +1,15 @@
+# SourceEbayFulfillment
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `password` | *str* | :heavy_check_mark: | N/A |
+| `redirect_uri` | *str* | :heavy_check_mark: | N/A |
+| `refresh_token` | *str* | :heavy_check_mark: | N/A |
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A |
+| `username` | *str* | :heavy_check_mark: | N/A |
+| `api_host` | [Optional[models.SourceEbayFulfillmentAPIHost]](../models/sourceebayfulfillmentapihost.md) | :heavy_minus_sign: | N/A |
+| `refresh_token_endpoint` | [Optional[models.SourceEbayFulfillmentRefreshTokenEndpoint]](../models/sourceebayfulfillmentrefreshtokenendpoint.md) | :heavy_minus_sign: | N/A |
+| `source_type` | [models.EbayFulfillment](../models/ebayfulfillment.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourceebayfulfillmentapihost.md b/docs/models/sourceebayfulfillmentapihost.md
new file mode 100644
index 00000000..c14123ae
--- /dev/null
+++ b/docs/models/sourceebayfulfillmentapihost.md
@@ -0,0 +1,9 @@
+# SourceEbayFulfillmentAPIHost
+
+
+## Values
+
+| Name | Value |
+| ---------------------------- | ---------------------------- |
+| `HTTPS_API_EBAY_COM` | https://api.ebay.com |
+| `HTTPS_API_SANDBOX_EBAY_COM` | https://api.sandbox.ebay.com |
\ No newline at end of file
diff --git a/docs/models/sourceebayfulfillmentrefreshtokenendpoint.md b/docs/models/sourceebayfulfillmentrefreshtokenendpoint.md
new file mode 100644
index 00000000..611306b1
--- /dev/null
+++ b/docs/models/sourceebayfulfillmentrefreshtokenendpoint.md
@@ -0,0 +1,9 @@
+# SourceEbayFulfillmentRefreshTokenEndpoint
+
+
+## Values
+
+| Name | Value |
+| --- | --- |
+| `HTTPS_API_EBAY_COM_IDENTITY_V1_OAUTH2_TOKEN` | https://api.ebay.com/identity/v1/oauth2/token |
+| `HTTPS_API_SANDBOX_EBAY_COM_IDENTITY_V1_OAUTH2_TOKEN` | https://api.sandbox.ebay.com/identity/v1/oauth2/token |
\ No newline at end of file
diff --git a/docs/models/sourceelasticsearch.md b/docs/models/sourceelasticsearch.md
new file mode 100644
index 00000000..fa226c98
--- /dev/null
+++ b/docs/models/sourceelasticsearch.md
@@ -0,0 +1,10 @@
+# SourceElasticsearch
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `endpoint` | *str* | :heavy_check_mark: | The full URL of the Elasticsearch server |
+| `authentication_method` | [Optional[models.SourceElasticsearchAuthenticationMethod]](../models/sourceelasticsearchauthenticationmethod.md) | :heavy_minus_sign: | The type of authentication to be used |
+| `source_type` | [models.SourceElasticsearchElasticsearch](../models/sourceelasticsearchelasticsearch.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourceelasticsearchapikeysecret.md b/docs/models/sourceelasticsearchapikeysecret.md
new file mode 100644
index 00000000..01b61469
--- /dev/null
+++ b/docs/models/sourceelasticsearchapikeysecret.md
@@ -0,0 +1,13 @@
+# SourceElasticsearchAPIKeySecret
+
+Use an API key and secret combination to authenticate
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `api_key_id` | *str* | :heavy_check_mark: | The Key ID used when accessing an enterprise Elasticsearch instance. |
+| `api_key_secret` | *str* | :heavy_check_mark: | The secret associated with the API Key ID. |
+| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A |
+| `method` | [models.SourceElasticsearchSchemasMethod](../models/sourceelasticsearchschemasmethod.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourceelasticsearchauthenticationmethod.md b/docs/models/sourceelasticsearchauthenticationmethod.md
new file mode 100644
index 00000000..16926be3
--- /dev/null
+++ b/docs/models/sourceelasticsearchauthenticationmethod.md
@@ -0,0 +1,25 @@
+# SourceElasticsearchAuthenticationMethod
+
+The type of authentication to be used
+
+
+## Supported Types
+
+### `models.SourceElasticsearchNone`
+
+```python
+value: models.SourceElasticsearchNone = /* values here */
+```
+
+### `models.SourceElasticsearchAPIKeySecret`
+
+```python
+value: models.SourceElasticsearchAPIKeySecret = /* values here */
+```
+
+### `models.SourceElasticsearchUsernamePassword`
+
+```python
+value: models.SourceElasticsearchUsernamePassword = /* values here */
+```
+
diff --git a/docs/models/sourceelasticsearchelasticsearch.md b/docs/models/sourceelasticsearchelasticsearch.md
new file mode 100644
index 00000000..c3389ebe
--- /dev/null
+++ b/docs/models/sourceelasticsearchelasticsearch.md
@@ -0,0 +1,8 @@
+# SourceElasticsearchElasticsearch
+
+
+## Values
+
+| Name | Value |
+| --------------- | --------------- |
+| `ELASTICSEARCH` | elasticsearch |
\ No newline at end of file
diff --git a/docs/models/sourceelasticsearchmethod.md b/docs/models/sourceelasticsearchmethod.md
new file mode 100644
index 00000000..5e64d732
--- /dev/null
+++ b/docs/models/sourceelasticsearchmethod.md
@@ -0,0 +1,8 @@
+# SourceElasticsearchMethod
+
+
+## Values
+
+| Name | Value |
+| ------ | ------ |
+| `NONE` | none |
\ No newline at end of file
diff --git a/docs/models/sourceelasticsearchnone.md b/docs/models/sourceelasticsearchnone.md
new file mode 100644
index 00000000..e20cd467
--- /dev/null
+++ b/docs/models/sourceelasticsearchnone.md
@@ -0,0 +1,11 @@
+# SourceElasticsearchNone
+
+No authentication will be used
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A |
+| `method` | [models.SourceElasticsearchMethod](../models/sourceelasticsearchmethod.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourceelasticsearchschemasauthenticationmethodmethod.md b/docs/models/sourceelasticsearchschemasauthenticationmethodmethod.md
new file mode 100644
index 00000000..9c78c641
--- /dev/null
+++ b/docs/models/sourceelasticsearchschemasauthenticationmethodmethod.md
@@ -0,0 +1,8 @@
+# SourceElasticsearchSchemasAuthenticationMethodMethod
+
+
+## Values
+
+| Name | Value |
+| ------- | ------- |
+| `BASIC` | basic |
\ No newline at end of file
diff --git a/docs/models/sourceelasticsearchschemasmethod.md b/docs/models/sourceelasticsearchschemasmethod.md
new file mode 100644
index 00000000..fbd3edc9
--- /dev/null
+++ b/docs/models/sourceelasticsearchschemasmethod.md
@@ -0,0 +1,8 @@
+# SourceElasticsearchSchemasMethod
+
+
+## Values
+
+| Name | Value |
+| -------- | -------- |
+| `SECRET` | secret |
\ No newline at end of file
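`SourceElasticsearch.authentication_method` takes exactly one member of the union above. A minimal sketch using the basic-auth variant documented just below; the endpoint and credentials are placeholders, and the const `method` discriminators are assumed to default in the generated models:

```python
from airbyte_api import models

# Placeholder endpoint and credentials.
es_config = models.SourceElasticsearch(
    endpoint="https://es.example.com:9200",
    authentication_method=models.SourceElasticsearchUsernamePassword(
        username="elastic",
        password="<basic-auth-password>",
    ),
)
```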
diff --git a/docs/models/sourceelasticsearchusernamepassword.md b/docs/models/sourceelasticsearchusernamepassword.md
new file mode 100644
index 00000000..a0e90a3a
--- /dev/null
+++ b/docs/models/sourceelasticsearchusernamepassword.md
@@ -0,0 +1,13 @@
+# SourceElasticsearchUsernamePassword
+
+Basic auth header with a username and password
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `password` | *str* | :heavy_check_mark: | Basic auth password to access a secure Elasticsearch server |
+| `username` | *str* | :heavy_check_mark: | Basic auth username to access a secure Elasticsearch server |
+| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A |
+| `method` | [models.SourceElasticsearchSchemasAuthenticationMethodMethod](../models/sourceelasticsearchschemasauthenticationmethodmethod.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourceeverhour.md b/docs/models/sourceeverhour.md
new file mode 100644
index 00000000..7125f7ea
--- /dev/null
+++ b/docs/models/sourceeverhour.md
@@ -0,0 +1,9 @@
+# SourceEverhour
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `api_key` | *str* | :heavy_check_mark: | Everhour API Key. See the docs for information on how to generate this key. |
+| `source_type` | [models.Everhour](../models/everhour.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourcefacebookmarketingactionreporttime.md b/docs/models/sourcefacebookmarketingactionreporttime.md
deleted file mode 100644
index 259fa78a..00000000
--- a/docs/models/sourcefacebookmarketingactionreporttime.md
+++ /dev/null
@@ -1,12 +0,0 @@
-# SourceFacebookMarketingActionReportTime
-
-Determines the report time of action stats. For example, if a person saw the ad on Jan 1st but converted on Jan 2nd, when you query the API with action_report_time=impression, you see a conversion on Jan 1st. When you query the API with action_report_time=conversion, you see a conversion on Jan 2nd.
- - -## Values - -| Name | Value | -| ------------ | ------------ | -| `CONVERSION` | conversion | -| `IMPRESSION` | impression | -| `MIXED` | mixed | \ No newline at end of file diff --git a/docs/models/sourcefacebookmarketingauthentication.md b/docs/models/sourcefacebookmarketingauthentication.md index 37d23c86..f1e22095 100644 --- a/docs/models/sourcefacebookmarketingauthentication.md +++ b/docs/models/sourcefacebookmarketingauthentication.md @@ -11,9 +11,9 @@ Credentials for connecting to the Facebook Marketing API value: models.AuthenticateViaFacebookMarketingOauth = /* values here */ ``` -### `models.ServiceAccountKeyAuthentication` +### `models.SourceFacebookMarketingServiceAccountKeyAuthentication` ```python -value: models.ServiceAccountKeyAuthentication = /* values here */ +value: models.SourceFacebookMarketingServiceAccountKeyAuthentication = /* values here */ ``` diff --git a/docs/models/sourcefacebookmarketingserviceaccountkeyauthentication.md b/docs/models/sourcefacebookmarketingserviceaccountkeyauthentication.md new file mode 100644 index 00000000..65b9bc72 --- /dev/null +++ b/docs/models/sourcefacebookmarketingserviceaccountkeyauthentication.md @@ -0,0 +1,9 @@ +# SourceFacebookMarketingServiceAccountKeyAuthentication + + +## Fields + +| Field | Type | Required | Description | +| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `access_token` | *str* | :heavy_check_mark: | The value of the generated access token. From your App’s Dashboard, click on "Marketing API" then "Tools". Select permissions ads_management, ads_read, read_insights, business_management. Then click on "Get token". See the docs for more information. | +| `auth_type` | [Optional[models.SourceFacebookMarketingSchemasAuthType]](../models/sourcefacebookmarketingschemasauthtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcefacebookmarketingvalidenums.md b/docs/models/sourcefacebookmarketingvalidenums.md index 69dafae2..47b5a4f8 100644 --- a/docs/models/sourcefacebookmarketingvalidenums.md +++ b/docs/models/sourcefacebookmarketingvalidenums.md @@ -5,136 +5,146 @@ An enumeration. 
## Values -| Name | Value | -| ----------------------------------------------- | ----------------------------------------------- | -| `ACCOUNT_CURRENCY` | account_currency | -| `ACCOUNT_ID` | account_id | -| `ACCOUNT_NAME` | account_name | -| `ACTION_VALUES` | action_values | -| `ACTIONS` | actions | -| `AD_CLICK_ACTIONS` | ad_click_actions | -| `AD_ID` | ad_id | -| `AD_IMPRESSION_ACTIONS` | ad_impression_actions | -| `AD_NAME` | ad_name | -| `ADSET_END` | adset_end | -| `ADSET_ID` | adset_id | -| `ADSET_NAME` | adset_name | -| `AGE_TARGETING` | age_targeting | -| `ATTRIBUTION_SETTING` | attribution_setting | -| `AUCTION_BID` | auction_bid | -| `AUCTION_COMPETITIVENESS` | auction_competitiveness | -| `AUCTION_MAX_COMPETITOR_BID` | auction_max_competitor_bid | -| `BUYING_TYPE` | buying_type | -| `CAMPAIGN_ID` | campaign_id | -| `CAMPAIGN_NAME` | campaign_name | -| `CANVAS_AVG_VIEW_PERCENT` | canvas_avg_view_percent | -| `CANVAS_AVG_VIEW_TIME` | canvas_avg_view_time | -| `CATALOG_SEGMENT_ACTIONS` | catalog_segment_actions | -| `CATALOG_SEGMENT_VALUE` | catalog_segment_value | -| `CATALOG_SEGMENT_VALUE_MOBILE_PURCHASE_ROAS` | catalog_segment_value_mobile_purchase_roas | -| `CATALOG_SEGMENT_VALUE_OMNI_PURCHASE_ROAS` | catalog_segment_value_omni_purchase_roas | -| `CATALOG_SEGMENT_VALUE_WEBSITE_PURCHASE_ROAS` | catalog_segment_value_website_purchase_roas | -| `CLICKS` | clicks | -| `CONVERSION_RATE_RANKING` | conversion_rate_ranking | -| `CONVERSION_VALUES` | conversion_values | -| `CONVERSIONS` | conversions | -| `CONVERTED_PRODUCT_QUANTITY` | converted_product_quantity | -| `CONVERTED_PRODUCT_VALUE` | converted_product_value | -| `COST_PER_15_SEC_VIDEO_VIEW` | cost_per_15_sec_video_view | -| `COST_PER_2_SEC_CONTINUOUS_VIDEO_VIEW` | cost_per_2_sec_continuous_video_view | -| `COST_PER_ACTION_TYPE` | cost_per_action_type | -| `COST_PER_AD_CLICK` | cost_per_ad_click | -| `COST_PER_CONVERSION` | cost_per_conversion | -| `COST_PER_DDA_COUNTBY_CONVS` | cost_per_dda_countby_convs | -| `COST_PER_ESTIMATED_AD_RECALLERS` | cost_per_estimated_ad_recallers | -| `COST_PER_INLINE_LINK_CLICK` | cost_per_inline_link_click | -| `COST_PER_INLINE_POST_ENGAGEMENT` | cost_per_inline_post_engagement | -| `COST_PER_ONE_THOUSAND_AD_IMPRESSION` | cost_per_one_thousand_ad_impression | -| `COST_PER_OUTBOUND_CLICK` | cost_per_outbound_click | -| `COST_PER_THRUPLAY` | cost_per_thruplay | -| `COST_PER_UNIQUE_ACTION_TYPE` | cost_per_unique_action_type | -| `COST_PER_UNIQUE_CLICK` | cost_per_unique_click | -| `COST_PER_UNIQUE_CONVERSION` | cost_per_unique_conversion | -| `COST_PER_UNIQUE_INLINE_LINK_CLICK` | cost_per_unique_inline_link_click | -| `COST_PER_UNIQUE_OUTBOUND_CLICK` | cost_per_unique_outbound_click | -| `CPC` | cpc | -| `CPM` | cpm | -| `CPP` | cpp | -| `CREATED_TIME` | created_time | -| `CREATIVE_MEDIA_TYPE` | creative_media_type | -| `CTR` | ctr | -| `DATE_START` | date_start | -| `DATE_STOP` | date_stop | -| `DDA_COUNTBY_CONVS` | dda_countby_convs | -| `DDA_RESULTS` | dda_results | -| `ENGAGEMENT_RATE_RANKING` | engagement_rate_ranking | -| `ESTIMATED_AD_RECALL_RATE` | estimated_ad_recall_rate | -| `ESTIMATED_AD_RECALL_RATE_LOWER_BOUND` | estimated_ad_recall_rate_lower_bound | -| `ESTIMATED_AD_RECALL_RATE_UPPER_BOUND` | estimated_ad_recall_rate_upper_bound | -| `ESTIMATED_AD_RECALLERS` | estimated_ad_recallers | -| `ESTIMATED_AD_RECALLERS_LOWER_BOUND` | estimated_ad_recallers_lower_bound | -| `ESTIMATED_AD_RECALLERS_UPPER_BOUND` | estimated_ad_recallers_upper_bound | -| `FREQUENCY` | frequency | -| 
`FULL_VIEW_IMPRESSIONS` | full_view_impressions | -| `FULL_VIEW_REACH` | full_view_reach | -| `GENDER_TARGETING` | gender_targeting | -| `IMPRESSIONS` | impressions | -| `INLINE_LINK_CLICK_CTR` | inline_link_click_ctr | -| `INLINE_LINK_CLICKS` | inline_link_clicks | -| `INLINE_POST_ENGAGEMENT` | inline_post_engagement | -| `INSTAGRAM_UPCOMING_EVENT_REMINDERS_SET` | instagram_upcoming_event_reminders_set | -| `INSTANT_EXPERIENCE_CLICKS_TO_OPEN` | instant_experience_clicks_to_open | -| `INSTANT_EXPERIENCE_CLICKS_TO_START` | instant_experience_clicks_to_start | -| `INSTANT_EXPERIENCE_OUTBOUND_CLICKS` | instant_experience_outbound_clicks | -| `INTERACTIVE_COMPONENT_TAP` | interactive_component_tap | -| `LABELS` | labels | -| `LOCATION` | location | -| `MARKETING_MESSAGES_COST_PER_DELIVERED` | marketing_messages_cost_per_delivered | -| `MARKETING_MESSAGES_COST_PER_LINK_BTN_CLICK` | marketing_messages_cost_per_link_btn_click | -| `MARKETING_MESSAGES_SPEND` | marketing_messages_spend | -| `MOBILE_APP_PURCHASE_ROAS` | mobile_app_purchase_roas | -| `OBJECTIVE` | objective | -| `OPTIMIZATION_GOAL` | optimization_goal | -| `OUTBOUND_CLICKS` | outbound_clicks | -| `OUTBOUND_CLICKS_CTR` | outbound_clicks_ctr | -| `PLACE_PAGE_NAME` | place_page_name | -| `PURCHASE_ROAS` | purchase_roas | -| `QUALIFYING_QUESTION_QUALIFY_ANSWER_RATE` | qualifying_question_qualify_answer_rate | -| `QUALITY_RANKING` | quality_ranking | -| `REACH` | reach | -| `SOCIAL_SPEND` | social_spend | -| `SPEND` | spend | -| `TOTAL_POSTBACKS` | total_postbacks | -| `TOTAL_POSTBACKS_DETAILED` | total_postbacks_detailed | -| `TOTAL_POSTBACKS_DETAILED_V4` | total_postbacks_detailed_v4 | -| `UNIQUE_ACTIONS` | unique_actions | -| `UNIQUE_CLICKS` | unique_clicks | -| `UNIQUE_CONVERSIONS` | unique_conversions | -| `UNIQUE_CTR` | unique_ctr | -| `UNIQUE_INLINE_LINK_CLICK_CTR` | unique_inline_link_click_ctr | -| `UNIQUE_INLINE_LINK_CLICKS` | unique_inline_link_clicks | -| `UNIQUE_LINK_CLICKS_CTR` | unique_link_clicks_ctr | -| `UNIQUE_OUTBOUND_CLICKS` | unique_outbound_clicks | -| `UNIQUE_OUTBOUND_CLICKS_CTR` | unique_outbound_clicks_ctr | -| `UNIQUE_VIDEO_CONTINUOUS_2_SEC_WATCHED_ACTIONS` | unique_video_continuous_2_sec_watched_actions | -| `UNIQUE_VIDEO_VIEW_15_SEC` | unique_video_view_15_sec | -| `UPDATED_TIME` | updated_time | -| `VIDEO_15_SEC_WATCHED_ACTIONS` | video_15_sec_watched_actions | -| `VIDEO_30_SEC_WATCHED_ACTIONS` | video_30_sec_watched_actions | -| `VIDEO_AVG_TIME_WATCHED_ACTIONS` | video_avg_time_watched_actions | -| `VIDEO_CONTINUOUS_2_SEC_WATCHED_ACTIONS` | video_continuous_2_sec_watched_actions | -| `VIDEO_P100_WATCHED_ACTIONS` | video_p100_watched_actions | -| `VIDEO_P25_WATCHED_ACTIONS` | video_p25_watched_actions | -| `VIDEO_P50_WATCHED_ACTIONS` | video_p50_watched_actions | -| `VIDEO_P75_WATCHED_ACTIONS` | video_p75_watched_actions | -| `VIDEO_P95_WATCHED_ACTIONS` | video_p95_watched_actions | -| `VIDEO_PLAY_ACTIONS` | video_play_actions | -| `VIDEO_PLAY_CURVE_ACTIONS` | video_play_curve_actions | -| `VIDEO_PLAY_RETENTION_0_TO_15S_ACTIONS` | video_play_retention_0_to_15s_actions | -| `VIDEO_PLAY_RETENTION_20_TO_60S_ACTIONS` | video_play_retention_20_to_60s_actions | -| `VIDEO_PLAY_RETENTION_GRAPH_ACTIONS` | video_play_retention_graph_actions | -| `VIDEO_THRUPLAY_WATCHED_ACTIONS` | video_thruplay_watched_actions | -| `VIDEO_TIME_WATCHED_ACTIONS` | video_time_watched_actions | -| `WEBSITE_CTR` | website_ctr | -| `WEBSITE_PURCHASE_ROAS` | website_purchase_roas | -| `WISH_BID` | wish_bid | \ No newline at end of file +| 
Name | Value | +| ------------------------------------------------------- | ------------------------------------------------------- | +| `ACCOUNT_CURRENCY` | account_currency | +| `ACCOUNT_ID` | account_id | +| `ACCOUNT_NAME` | account_name | +| `ACTION_VALUES` | action_values | +| `ACTIONS` | actions | +| `AD_CLICK_ACTIONS` | ad_click_actions | +| `AD_ID` | ad_id | +| `AD_IMPRESSION_ACTIONS` | ad_impression_actions | +| `AD_NAME` | ad_name | +| `ADSET_END` | adset_end | +| `ADSET_ID` | adset_id | +| `ADSET_NAME` | adset_name | +| `AGE_TARGETING` | age_targeting | +| `ATTRIBUTION_SETTING` | attribution_setting | +| `AUCTION_BID` | auction_bid | +| `AUCTION_COMPETITIVENESS` | auction_competitiveness | +| `AUCTION_MAX_COMPETITOR_BID` | auction_max_competitor_bid | +| `AVERAGE_PURCHASES_CONVERSION_VALUE` | average_purchases_conversion_value | +| `BUYING_TYPE` | buying_type | +| `CAMPAIGN_ID` | campaign_id | +| `CAMPAIGN_NAME` | campaign_name | +| `CANVAS_AVG_VIEW_PERCENT` | canvas_avg_view_percent | +| `CANVAS_AVG_VIEW_TIME` | canvas_avg_view_time | +| `CATALOG_SEGMENT_ACTIONS` | catalog_segment_actions | +| `CATALOG_SEGMENT_VALUE` | catalog_segment_value | +| `CATALOG_SEGMENT_VALUE_MOBILE_PURCHASE_ROAS` | catalog_segment_value_mobile_purchase_roas | +| `CATALOG_SEGMENT_VALUE_OMNI_PURCHASE_ROAS` | catalog_segment_value_omni_purchase_roas | +| `CATALOG_SEGMENT_VALUE_WEBSITE_PURCHASE_ROAS` | catalog_segment_value_website_purchase_roas | +| `CLICKS` | clicks | +| `CONVERSION_RATE_RANKING` | conversion_rate_ranking | +| `CONVERSION_VALUES` | conversion_values | +| `CONVERSIONS` | conversions | +| `CONVERTED_PRODUCT_QUANTITY` | converted_product_quantity | +| `CONVERTED_PRODUCT_VALUE` | converted_product_value | +| `COST_PER_15_SEC_VIDEO_VIEW` | cost_per_15_sec_video_view | +| `COST_PER_2_SEC_CONTINUOUS_VIDEO_VIEW` | cost_per_2_sec_continuous_video_view | +| `COST_PER_ACTION_TYPE` | cost_per_action_type | +| `COST_PER_AD_CLICK` | cost_per_ad_click | +| `COST_PER_CONVERSION` | cost_per_conversion | +| `COST_PER_DDA_COUNTBY_CONVS` | cost_per_dda_countby_convs | +| `COST_PER_ESTIMATED_AD_RECALLERS` | cost_per_estimated_ad_recallers | +| `COST_PER_INLINE_LINK_CLICK` | cost_per_inline_link_click | +| `COST_PER_INLINE_POST_ENGAGEMENT` | cost_per_inline_post_engagement | +| `COST_PER_ONE_THOUSAND_AD_IMPRESSION` | cost_per_one_thousand_ad_impression | +| `COST_PER_OUTBOUND_CLICK` | cost_per_outbound_click | +| `COST_PER_THRUPLAY` | cost_per_thruplay | +| `COST_PER_UNIQUE_ACTION_TYPE` | cost_per_unique_action_type | +| `COST_PER_UNIQUE_CLICK` | cost_per_unique_click | +| `COST_PER_UNIQUE_CONVERSION` | cost_per_unique_conversion | +| `COST_PER_UNIQUE_INLINE_LINK_CLICK` | cost_per_unique_inline_link_click | +| `COST_PER_UNIQUE_OUTBOUND_CLICK` | cost_per_unique_outbound_click | +| `CPC` | cpc | +| `CPM` | cpm | +| `CPP` | cpp | +| `CREATED_TIME` | created_time | +| `CREATIVE_MEDIA_TYPE` | creative_media_type | +| `CTR` | ctr | +| `DATE_START` | date_start | +| `DATE_STOP` | date_stop | +| `DDA_COUNTBY_CONVS` | dda_countby_convs | +| `DDA_RESULTS` | dda_results | +| `ENGAGEMENT_RATE_RANKING` | engagement_rate_ranking | +| `ESTIMATED_AD_RECALL_RATE` | estimated_ad_recall_rate | +| `ESTIMATED_AD_RECALL_RATE_LOWER_BOUND` | estimated_ad_recall_rate_lower_bound | +| `ESTIMATED_AD_RECALL_RATE_UPPER_BOUND` | estimated_ad_recall_rate_upper_bound | +| `ESTIMATED_AD_RECALLERS` | estimated_ad_recallers | +| `ESTIMATED_AD_RECALLERS_LOWER_BOUND` | estimated_ad_recallers_lower_bound | +| `ESTIMATED_AD_RECALLERS_UPPER_BOUND` 
| estimated_ad_recallers_upper_bound | +| `FREQUENCY` | frequency | +| `FULL_VIEW_IMPRESSIONS` | full_view_impressions | +| `FULL_VIEW_REACH` | full_view_reach | +| `GENDER_TARGETING` | gender_targeting | +| `IMPRESSIONS` | impressions | +| `INLINE_LINK_CLICK_CTR` | inline_link_click_ctr | +| `INLINE_LINK_CLICKS` | inline_link_clicks | +| `INLINE_POST_ENGAGEMENT` | inline_post_engagement | +| `INSTAGRAM_UPCOMING_EVENT_REMINDERS_SET` | instagram_upcoming_event_reminders_set | +| `INSTANT_EXPERIENCE_CLICKS_TO_OPEN` | instant_experience_clicks_to_open | +| `INSTANT_EXPERIENCE_CLICKS_TO_START` | instant_experience_clicks_to_start | +| `INSTANT_EXPERIENCE_OUTBOUND_CLICKS` | instant_experience_outbound_clicks | +| `INTERACTIVE_COMPONENT_TAP` | interactive_component_tap | +| `LABELS` | labels | +| `LOCATION` | location | +| `MARKETING_MESSAGES_COST_PER_DELIVERED` | marketing_messages_cost_per_delivered | +| `MARKETING_MESSAGES_COST_PER_LINK_BTN_CLICK` | marketing_messages_cost_per_link_btn_click | +| `MARKETING_MESSAGES_DELIVERY_RATE` | marketing_messages_delivery_rate | +| `MARKETING_MESSAGES_LINK_BTN_CLICK_RATE` | marketing_messages_link_btn_click_rate | +| `MARKETING_MESSAGES_MEDIA_VIEW_RATE` | marketing_messages_media_view_rate | +| `MARKETING_MESSAGES_PHONE_CALL_BTN_CLICK_RATE` | marketing_messages_phone_call_btn_click_rate | +| `MARKETING_MESSAGES_QUICK_REPLY_BTN_CLICK_RATE` | marketing_messages_quick_reply_btn_click_rate | +| `MARKETING_MESSAGES_READ_RATE` | marketing_messages_read_rate | +| `MARKETING_MESSAGES_SPEND` | marketing_messages_spend | +| `MARKETING_MESSAGES_WEBSITE_PURCHASE_VALUES` | marketing_messages_website_purchase_values | +| `MOBILE_APP_PURCHASE_ROAS` | mobile_app_purchase_roas | +| `OBJECTIVE` | objective | +| `ONSITE_CONVERSION_MESSAGING_DETECTED_PURCHASE_DEDUPED` | onsite_conversion_messaging_detected_purchase_deduped | +| `OPTIMIZATION_GOAL` | optimization_goal | +| `OUTBOUND_CLICKS` | outbound_clicks | +| `OUTBOUND_CLICKS_CTR` | outbound_clicks_ctr | +| `PLACE_PAGE_NAME` | place_page_name | +| `PURCHASE_ROAS` | purchase_roas | +| `QUALIFYING_QUESTION_QUALIFY_ANSWER_RATE` | qualifying_question_qualify_answer_rate | +| `QUALITY_RANKING` | quality_ranking | +| `REACH` | reach | +| `SHOPS_ASSISTED_PURCHASES` | shops_assisted_purchases | +| `SOCIAL_SPEND` | social_spend | +| `SPEND` | spend | +| `TOTAL_POSTBACKS` | total_postbacks | +| `TOTAL_POSTBACKS_DETAILED` | total_postbacks_detailed | +| `TOTAL_POSTBACKS_DETAILED_V4` | total_postbacks_detailed_v4 | +| `UNIQUE_ACTIONS` | unique_actions | +| `UNIQUE_CLICKS` | unique_clicks | +| `UNIQUE_CONVERSIONS` | unique_conversions | +| `UNIQUE_CTR` | unique_ctr | +| `UNIQUE_INLINE_LINK_CLICK_CTR` | unique_inline_link_click_ctr | +| `UNIQUE_INLINE_LINK_CLICKS` | unique_inline_link_clicks | +| `UNIQUE_LINK_CLICKS_CTR` | unique_link_clicks_ctr | +| `UNIQUE_OUTBOUND_CLICKS` | unique_outbound_clicks | +| `UNIQUE_OUTBOUND_CLICKS_CTR` | unique_outbound_clicks_ctr | +| `UNIQUE_VIDEO_CONTINUOUS_2_SEC_WATCHED_ACTIONS` | unique_video_continuous_2_sec_watched_actions | +| `UNIQUE_VIDEO_VIEW_15_SEC` | unique_video_view_15_sec | +| `UPDATED_TIME` | updated_time | +| `VIDEO_15_SEC_WATCHED_ACTIONS` | video_15_sec_watched_actions | +| `VIDEO_30_SEC_WATCHED_ACTIONS` | video_30_sec_watched_actions | +| `VIDEO_AVG_TIME_WATCHED_ACTIONS` | video_avg_time_watched_actions | +| `VIDEO_CONTINUOUS_2_SEC_WATCHED_ACTIONS` | video_continuous_2_sec_watched_actions | +| `VIDEO_P100_WATCHED_ACTIONS` | video_p100_watched_actions | +| `VIDEO_P25_WATCHED_ACTIONS` | 
video_p25_watched_actions | +| `VIDEO_P50_WATCHED_ACTIONS` | video_p50_watched_actions | +| `VIDEO_P75_WATCHED_ACTIONS` | video_p75_watched_actions | +| `VIDEO_P95_WATCHED_ACTIONS` | video_p95_watched_actions | +| `VIDEO_PLAY_ACTIONS` | video_play_actions | +| `VIDEO_PLAY_CURVE_ACTIONS` | video_play_curve_actions | +| `VIDEO_PLAY_RETENTION_0_TO_15S_ACTIONS` | video_play_retention_0_to_15s_actions | +| `VIDEO_PLAY_RETENTION_20_TO_60S_ACTIONS` | video_play_retention_20_to_60s_actions | +| `VIDEO_PLAY_RETENTION_GRAPH_ACTIONS` | video_play_retention_graph_actions | +| `VIDEO_THRUPLAY_WATCHED_ACTIONS` | video_thruplay_watched_actions | +| `VIDEO_TIME_WATCHED_ACTIONS` | video_time_watched_actions | +| `WEBSITE_CTR` | website_ctr | +| `WEBSITE_PURCHASE_ROAS` | website_purchase_roas | +| `WISH_BID` | wish_bid | \ No newline at end of file diff --git a/docs/models/sourcefacebookpages.md b/docs/models/sourcefacebookpages.md new file mode 100644 index 00000000..c92e8d1e --- /dev/null +++ b/docs/models/sourcefacebookpages.md @@ -0,0 +1,10 @@ +# SourceFacebookPages + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------- | -------------------------------------------------- | -------------------------------------------------- | -------------------------------------------------- | +| `access_token` | *str* | :heavy_check_mark: | Facebook Page Access Token | +| `page_id` | *str* | :heavy_check_mark: | Page ID | +| `source_type` | [models.FacebookPages](../models/facebookpages.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcefastbill.md b/docs/models/sourcefastbill.md new file mode 100644 index 00000000..64e91a56 --- /dev/null +++ b/docs/models/sourcefastbill.md @@ -0,0 +1,10 @@ +# SourceFastbill + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Fastbill API key | +| `username` | *str* | :heavy_check_mark: | Username for Fastbill account | +| `source_type` | [models.Fastbill](../models/fastbill.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcefastly.md b/docs/models/sourcefastly.md new file mode 100644 index 00000000..2878aa62 --- /dev/null +++ b/docs/models/sourcefastly.md @@ -0,0 +1,10 @@ +# SourceFastly + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `fastly_api_token` | *str* | :heavy_check_mark: | Your Fastly API token. You can generate this token in the Fastly web interface under Account Settings or via the Fastly API. 
Ensure the token has the appropriate scope for your use case. | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.Fastly](../models/fastly.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcefaunadeletionmode.md b/docs/models/sourcefaunadeletionmode.md index 5bc4eca1..5218ce79 100644 --- a/docs/models/sourcefaunadeletionmode.md +++ b/docs/models/sourcefaunadeletionmode.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| -------- | -------- | -| `IGNORE` | ignore | \ No newline at end of file +| Name | Value | +| --------------- | --------------- | +| `DELETED_FIELD` | deleted_field | \ No newline at end of file diff --git a/docs/models/sourcefaunaschemasdeletionmode.md b/docs/models/sourcefaunaschemasdeletionmode.md index 31fa4e7b..cd85233b 100644 --- a/docs/models/sourcefaunaschemasdeletionmode.md +++ b/docs/models/sourcefaunaschemasdeletionmode.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| --------------- | --------------- | -| `DELETED_FIELD` | deleted_field | \ No newline at end of file +| Name | Value | +| -------- | -------- | +| `IGNORE` | ignore | \ No newline at end of file diff --git a/docs/models/sourcefile.md b/docs/models/sourcefile.md index f306c281..b3e7ad19 100644 --- a/docs/models/sourcefile.md +++ b/docs/models/sourcefile.md @@ -8,6 +8,6 @@ | `dataset_name` | *str* | :heavy_check_mark: | The name of the final table to replicate this file into (should include letters, numbers, dashes and underscores only). | | | `provider` | [models.StorageProvider](../models/storageprovider.md) | :heavy_check_mark: | The storage Provider or Location of the file(s) which should be replicated. | | | `url` | *str* | :heavy_check_mark: | The URL path to access the file which should be replicated. | https://storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv | -| `format` | [Optional[models.SourceFileFileFormat]](../models/sourcefilefileformat.md) | :heavy_minus_sign: | The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs). | | +| `format` | [Optional[models.FileFormat]](../models/fileformat.md) | :heavy_minus_sign: | The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs). | | | `reader_options` | *Optional[str]* | :heavy_minus_sign: | This should be a string in JSON format. It depends on the chosen file format to provide additional options and tune its behavior. | {} | | `source_type` | [models.File](../models/file.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourcefilefileformat.md b/docs/models/sourcefilefileformat.md deleted file mode 100644 index 73e28e47..00000000 --- a/docs/models/sourcefilefileformat.md +++ /dev/null @@ -1,18 +0,0 @@ -# SourceFileFileFormat - -The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs). 
- - ## Values - -| Name | Value | -| -------------- | -------------- | -| `CSV` | csv | -| `JSON` | json | -| `JSONL` | jsonl | -| `EXCEL` | excel | -| `EXCEL_BINARY` | excel_binary | -| `FWF` | fwf | -| `FEATHER` | feather | -| `PARQUET` | parquet | -| `YAML` | yaml | \ No newline at end of file diff --git a/docs/models/sourcefullstory.md b/docs/models/sourcefullstory.md new file mode 100644 index 00000000..64d3ac62 --- /dev/null +++ b/docs/models/sourcefullstory.md @@ -0,0 +1,10 @@ +# SourceFullstory + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------ | ------------------------------------------ | ------------------------------------------ | ------------------------------------------ | +| `api_key` | *str* | :heavy_check_mark: | API Key for the fullstory.com API. | +| `uid` | *str* | :heavy_check_mark: | User ID for the fullstory.com API. | +| `source_type` | [models.Fullstory](../models/fullstory.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcegcsauthenticateviagoogleoauth.md b/docs/models/sourcegcsauthenticateviagoogleoauth.md new file mode 100644 index 00000000..86a07995 --- /dev/null +++ b/docs/models/sourcegcsauthenticateviagoogleoauth.md @@ -0,0 +1,12 @@ +# SourceGcsAuthenticateViaGoogleOAuth + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `access_token` | *str* | :heavy_check_mark: | Access Token | +| `client_id` | *str* | :heavy_check_mark: | Client ID | +| `client_secret` | *str* | :heavy_check_mark: | Client Secret | +| `refresh_token` | *str* | :heavy_check_mark: | Refresh Token | +| `auth_type` | [Optional[models.SourceGcsAuthType]](../models/sourcegcsauthtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcegcsauthentication.md b/docs/models/sourcegcsauthentication.md index df7c66cc..251e0f13 100644 --- a/docs/models/sourcegcsauthentication.md +++ b/docs/models/sourcegcsauthentication.md @@ -5,10 +5,10 @@ Credentials for connecting to the Google Cloud Storage API ## Supported Types -### `models.AuthenticateViaGoogleOAuth` +### `models.SourceGcsAuthenticateViaGoogleOAuth` ```python -value: models.AuthenticateViaGoogleOAuth = /* values here */ +value: models.SourceGcsAuthenticateViaGoogleOAuth = /* values here */ ``` ### `models.ServiceAccountAuthentication` diff --git a/docs/models/sourcegcsformat.md b/docs/models/sourcegcsformat.md index dae332ba..7a4a6199 100644 --- a/docs/models/sourcegcsformat.md +++ b/docs/models/sourcegcsformat.md @@ -29,10 +29,10 @@ value: models.SourceGcsJsonlFormat = /* values here */ value: models.SourceGcsParquetFormat = /* values here */ ``` -### `models.UnstructuredDocumentFormat` +### `models.SourceGcsUnstructuredDocumentFormat` ```python -value: models.UnstructuredDocumentFormat = /* values here */ +value: models.SourceGcsUnstructuredDocumentFormat = /* values here */ ``` ### `models.ExcelFormat` diff --git a/docs/models/documentfiletypeformatexperimental.md b/docs/models/sourcegcsunstructureddocumentformat.md similarity index 95% rename from docs/models/documentfiletypeformatexperimental.md rename to docs/models/sourcegcsunstructureddocumentformat.md index be584ff6..9d5d58e2 100644 --- 
a/docs/models/documentfiletypeformatexperimental.md +++ b/docs/models/sourcegcsunstructureddocumentformat.md @@ -1,4 +1,4 @@ -# DocumentFileTypeFormatExperimental +# SourceGcsUnstructuredDocumentFormat Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file. @@ -7,7 +7,7 @@ Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one rec | Field | Type | Required | Description | | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `filetype` | [Optional[models.SourceAzureBlobStorageSchemasStreamsFormatFiletype]](../models/sourceazureblobstorageschemasstreamsformatfiletype.md) | :heavy_minus_sign: | N/A | -| `processing` | [Optional[models.Processing]](../models/processing.md) | :heavy_minus_sign: | Processing configuration | +| `filetype` | [Optional[models.SourceGcsSchemasStreamsFormatFormatFiletype]](../models/sourcegcsschemasstreamsformatformatfiletype.md) | :heavy_minus_sign: | N/A | +| `processing` | [Optional[models.SourceGcsProcessing]](../models/sourcegcsprocessing.md) | :heavy_minus_sign: | Processing configuration | | `skip_unprocessable_files` | *Optional[bool]* | :heavy_minus_sign: | If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. | -| `strategy` | [Optional[models.ParsingStrategy]](../models/parsingstrategy.md) | :heavy_minus_sign: | The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. 
See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf | \ No newline at end of file +| `strategy` | [Optional[models.SourceGcsParsingStrategy]](../models/sourcegcsparsingstrategy.md) | :heavy_minus_sign: | The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf | \ No newline at end of file diff --git a/docs/models/sourcegiphy.md b/docs/models/sourcegiphy.md new file mode 100644 index 00000000..2c384231 --- /dev/null +++ b/docs/models/sourcegiphy.md @@ -0,0 +1,14 @@ +# SourceGiphy + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Your GIPHY API Key. You can create and find your API key in the GIPHY Developer Dashboard at https://developers.giphy.com/dashboard/. | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `query` | *Optional[str]* | :heavy_minus_sign: | A query for search endpoint | +| `query_for_clips` | *Optional[str]* | :heavy_minus_sign: | Query for clips search endpoint | +| `query_for_gif` | *Optional[str]* | :heavy_minus_sign: | Query for gif search endpoint | +| `query_for_stickers` | *Optional[str]* | :heavy_minus_sign: | Query for stickers search endpoint | +| `source_type` | [models.Giphy](../models/giphy.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcegithubauthentication.md b/docs/models/sourcegithubauthentication.md index 425e18e9..f90c19cc 100644 --- a/docs/models/sourcegithubauthentication.md +++ b/docs/models/sourcegithubauthentication.md @@ -5,10 +5,10 @@ Choose how to authenticate to GitHub ## Supported Types -### `models.OAuth` +### `models.SourceGithubOAuth` ```python -value: models.OAuth = /* values here */ +value: models.SourceGithubOAuth = /* values here */ ``` ### `models.SourceGithubPersonalAccessToken` diff --git a/docs/models/sourcegithuboauth.md b/docs/models/sourcegithuboauth.md new file mode 100644 index 00000000..4505e10e --- /dev/null +++ b/docs/models/sourcegithuboauth.md @@ -0,0 +1,11 @@ +# SourceGithubOAuth + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | +| `access_token` | *str* | :heavy_check_mark: | OAuth access token | +| `client_id` | *Optional[str]* | :heavy_minus_sign: | OAuth Client Id | +| `client_secret` | *Optional[str]* | :heavy_minus_sign: | 
OAuth Client secret | +| `option_title` | [Optional[models.OptionTitle]](../models/optiontitle.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcegologin.md b/docs/models/sourcegologin.md new file mode 100644 index 00000000..c0a9692d --- /dev/null +++ b/docs/models/sourcegologin.md @@ -0,0 +1,10 @@ +# SourceGologin + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | API Key found at `https://app.gologin.com/personalArea/TokenApi` | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.Gologin](../models/gologin.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcegoogleanalyticsdataapi.md b/docs/models/sourcegoogleanalyticsdataapi.md index ddbe8707..667d0c69 100644 --- a/docs/models/sourcegoogleanalyticsdataapi.md +++ b/docs/models/sourcegoogleanalyticsdataapi.md @@ -9,6 +9,7 @@ | `convert_conversions_event` | *Optional[bool]* | :heavy_minus_sign: | Enables conversion of `conversions:*` event metrics from integers to floats. This is beneficial for preventing data rounding when the API returns float values for any `conversions:*` fields. | | | `credentials` | [Optional[models.SourceGoogleAnalyticsDataAPICredentials]](../models/sourcegoogleanalyticsdataapicredentials.md) | :heavy_minus_sign: | Credentials for the service | | | `custom_reports_array` | List[[models.SourceGoogleAnalyticsDataAPICustomReportConfig](../models/sourcegoogleanalyticsdataapicustomreportconfig.md)] | :heavy_minus_sign: | You can add your Custom Analytics report by creating one. | | +| `date_ranges_end_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_minus_sign: | The end date up to which to replicate report data in the format YYYY-MM-DD. Data generated after this date will not be included in the report. Not applied to custom Cohort reports. When no date is provided or the date is in the future, today's date is used. | 2021-01-31 | | `date_ranges_start_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_minus_sign: | The start date from which to replicate report data in the format YYYY-MM-DD. Data generated before this date will not be included in the report. Not applied to custom Cohort reports. | 2021-01-01 | | `keep_empty_rows` | *Optional[bool]* | :heavy_minus_sign: | If false, each row with all metrics equal to 0 will not be returned. If true, these rows will be returned if they are not separately removed by a filter. More information is available in the documentation. | | | `lookback_window` | *Optional[int]* | :heavy_minus_sign: | Since attribution changes after the event date, and Google Analytics has a data processing latency, we should specify how many days in the past we should refresh the data in every run. So if you set it at 5 days, in every sync it will fetch the last bookmark date minus 5 days. 
| 2 | diff --git a/docs/models/sourcegoogledrive.md b/docs/models/sourcegoogledrive.md index 4bbb5a17..d4265e5b 100644 --- a/docs/models/sourcegoogledrive.md +++ b/docs/models/sourcegoogledrive.md @@ -11,5 +11,6 @@ that are needed when users configure a file-based source. | `credentials` | [models.SourceGoogleDriveAuthentication](../models/sourcegoogledriveauthentication.md) | :heavy_check_mark: | Credentials for connecting to the Google Drive API | | | `folder_url` | *str* | :heavy_check_mark: | URL for the folder you want to sync. Using individual streams and glob patterns, it's possible to only sync a subset of all files located in the folder. | https://drive.google.com/drive/folders/1Xaz0vXXXX2enKnNYU5qSt9NS70gvMyYn | | `streams` | List[[models.SourceGoogleDriveFileBasedStreamConfig](../models/sourcegoogledrivefilebasedstreamconfig.md)] | :heavy_check_mark: | Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table. | | +| `delivery_method` | [Optional[models.DeliveryMethod]](../models/deliverymethod.md) | :heavy_minus_sign: | N/A | | | `source_type` | [models.SourceGoogleDriveGoogleDrive](../models/sourcegoogledrivegoogledrive.md) | :heavy_check_mark: | N/A | | | `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated. | 2021-01-01T00:00:00.000000Z | \ No newline at end of file diff --git a/docs/models/sourcegoogledrivedeliverytype.md b/docs/models/sourcegoogledrivedeliverytype.md new file mode 100644 index 00000000..6a2e029f --- /dev/null +++ b/docs/models/sourcegoogledrivedeliverytype.md @@ -0,0 +1,8 @@ +# SourceGoogleDriveDeliveryType + + +## Values + +| Name | Value | +| ------------------- | ------------------- | +| `USE_FILE_TRANSFER` | use_file_transfer | \ No newline at end of file diff --git a/docs/models/sourcegoogledriveexcelformat.md b/docs/models/sourcegoogledriveexcelformat.md new file mode 100644 index 00000000..69161e11 --- /dev/null +++ b/docs/models/sourcegoogledriveexcelformat.md @@ -0,0 +1,8 @@ +# SourceGoogleDriveExcelFormat + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------ | +| `filetype` | [Optional[models.SourceGoogleDriveSchemasStreamsFormatFormat6Filetype]](../models/sourcegoogledriveschemasstreamsformatformat6filetype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcegoogledrivefilebasedstreamconfig.md b/docs/models/sourcegoogledrivefilebasedstreamconfig.md index 8cc7d88f..718774f4 100644 --- a/docs/models/sourcegoogledrivefilebasedstreamconfig.md +++ b/docs/models/sourcegoogledrivefilebasedstreamconfig.md @@ -10,5 +10,6 @@ | `days_to_sync_if_history_is_full` | 
*Optional[int]* | :heavy_minus_sign: | When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. | | `globs` | List[*str*] | :heavy_minus_sign: | The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here. | | `input_schema` | *Optional[str]* | :heavy_minus_sign: | The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files. | +| `recent_n_files_to_read_for_schema_discovery` | *Optional[int]* | :heavy_minus_sign: | The number of recent files which will be used to discover the schema for this stream. | | `schemaless` | *Optional[bool]* | :heavy_minus_sign: | When enabled, syncs will not validate or structure records against the stream's schema. | | `validation_policy` | [Optional[models.SourceGoogleDriveValidationPolicy]](../models/sourcegoogledrivevalidationpolicy.md) | :heavy_minus_sign: | The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. | \ No newline at end of file diff --git a/docs/models/sourcegoogledriveformat.md b/docs/models/sourcegoogledriveformat.md index 09cffae4..553893ea 100644 --- a/docs/models/sourcegoogledriveformat.md +++ b/docs/models/sourcegoogledriveformat.md @@ -29,9 +29,15 @@ value: models.SourceGoogleDriveJsonlFormat = /* values here */ value: models.SourceGoogleDriveParquetFormat = /* values here */ ``` -### `models.SourceGoogleDriveDocumentFileTypeFormatExperimental` +### `models.SourceGoogleDriveUnstructuredDocumentFormat` ```python -value: models.SourceGoogleDriveDocumentFileTypeFormatExperimental = /* values here */ +value: models.SourceGoogleDriveUnstructuredDocumentFormat = /* values here */ +``` + +### `models.SourceGoogleDriveExcelFormat` + +```python +value: models.SourceGoogleDriveExcelFormat = /* values here */ ``` diff --git a/docs/models/sourcegoogledriveschemasdeliverytype.md b/docs/models/sourcegoogledriveschemasdeliverytype.md new file mode 100644 index 00000000..e82e1b5b --- /dev/null +++ b/docs/models/sourcegoogledriveschemasdeliverytype.md @@ -0,0 +1,8 @@ +# SourceGoogleDriveSchemasDeliveryType + + +## Values + +| Name | Value | +| -------------------------- | -------------------------- | +| `USE_PERMISSIONS_TRANSFER` | use_permissions_transfer | \ No newline at end of file diff --git a/docs/models/sourcegoogledriveschemasstreamsformatformat6filetype.md b/docs/models/sourcegoogledriveschemasstreamsformatformat6filetype.md new file mode 100644 index 00000000..bebf597a --- /dev/null +++ b/docs/models/sourcegoogledriveschemasstreamsformatformat6filetype.md @@ -0,0 +1,8 @@ +# SourceGoogleDriveSchemasStreamsFormatFormat6Filetype + + +## Values + +| Name | Value | +| ------- | ------- | +| `EXCEL` | excel | \ No newline at end of file diff --git a/docs/models/sourcegoogledrivedocumentfiletypeformatexperimental.md b/docs/models/sourcegoogledriveunstructureddocumentformat.md similarity index 99% rename from docs/models/sourcegoogledrivedocumentfiletypeformatexperimental.md rename to docs/models/sourcegoogledriveunstructureddocumentformat.md index 3d5f4781..f92878ae 100644 --- a/docs/models/sourcegoogledrivedocumentfiletypeformatexperimental.md +++ b/docs/models/sourcegoogledriveunstructureddocumentformat.md @@ -1,4 +1,4 @@ -# SourceGoogleDriveDocumentFileTypeFormatExperimental +# SourceGoogleDriveUnstructuredDocumentFormat Extract text from document 
formats (.pdf, .docx, .md, .pptx) and emit as one record per file. diff --git a/docs/models/sourcegooglesearchconsole.md b/docs/models/sourcegooglesearchconsole.md index 935a9a48..b3aa9498 100644 --- a/docs/models/sourcegooglesearchconsole.md +++ b/docs/models/sourcegooglesearchconsole.md @@ -5,10 +5,12 @@ | Field | Type | Required | Description | Example | | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `authorization` | [models.AuthenticationType](../models/authenticationtype.md) | :heavy_check_mark: | N/A | | +| `authorization` | [models.SourceGoogleSearchConsoleAuthenticationType](../models/sourcegooglesearchconsoleauthenticationtype.md) | :heavy_check_mark: | N/A | | | `site_urls` | List[*str*] | :heavy_check_mark: | The URLs of the website property attached to your GSC account. Learn more about properties here. | https://example1.com/ | +| `always_use_aggregation_type_auto` | *Optional[bool]* | :heavy_minus_sign: | Some search analytics streams fail with a 400 error if the specified `aggregationType` is not supported. This is customer implementation dependent and if this error is encountered, enable this setting which will override the existing `aggregationType` to use `auto` which should resolve the stream errors. | | | `custom_reports_array` | List[[models.SourceGoogleSearchConsoleCustomReportConfig](../models/sourcegooglesearchconsolecustomreportconfig.md)] | :heavy_minus_sign: | You can add your Custom Analytics report by creating one. | | | `data_state` | [Optional[models.DataFreshness]](../models/datafreshness.md) | :heavy_minus_sign: | If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. 
When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation. | final | | `end_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_minus_sign: | UTC date in the format YYYY-MM-DD. Any data created after this date will not be replicated. Must be greater or equal to the start date field. Leaving this field blank will replicate all data from the start date onward. | 2021-12-12 | +| `num_workers` | *Optional[int]* | :heavy_minus_sign: | The number of worker threads to use for the sync. For more details on Google Search Console rate limits, refer to the docs. | 30 | | `source_type` | [models.SourceGoogleSearchConsoleGoogleSearchConsole](../models/sourcegooglesearchconsolegooglesearchconsole.md) | :heavy_check_mark: | N/A | | | `start_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_minus_sign: | UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. | | \ No newline at end of file diff --git a/docs/models/sourcegooglesearchconsoleauthenticationtype.md b/docs/models/sourcegooglesearchconsoleauthenticationtype.md new file mode 100644 index 00000000..1b5130a5 --- /dev/null +++ b/docs/models/sourcegooglesearchconsoleauthenticationtype.md @@ -0,0 +1,17 @@ +# SourceGoogleSearchConsoleAuthenticationType + + +## Supported Types + +### `models.SourceGoogleSearchConsoleOAuth` + +```python +value: models.SourceGoogleSearchConsoleOAuth = /* values here */ +``` + +### `models.SourceGoogleSearchConsoleServiceAccountKeyAuthentication` + +```python +value: models.SourceGoogleSearchConsoleServiceAccountKeyAuthentication = /* values here */ +``` + diff --git a/docs/models/sourcegooglesheets.md b/docs/models/sourcegooglesheets.md index ca27e610..32367700 100644 --- a/docs/models/sourcegooglesheets.md +++ b/docs/models/sourcegooglesheets.md @@ -3,10 +3,16 @@ ## Fields -| Field | Type | Required | Description | Example | -| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `credentials` | [models.SourceGoogleSheetsAuthentication](../models/sourcegooglesheetsauthentication.md) | :heavy_check_mark: | Credentials for connecting to the Google Sheets API | | -| `spreadsheet_id` | *str* | :heavy_check_mark: | Enter the link to the Google spreadsheet you want to sync. 
To copy the link, click the 'Share' button in the top-right corner of the spreadsheet, then click 'Copy link'. | https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG-arw2xy4HR3D-dwUb/edit | -| `batch_size` | *Optional[int]* | :heavy_minus_sign: | Default value is 200. An integer representing row batch size for each sent request to Google Sheets API. Row batch size means how many rows are processed from the google sheet, for example default value 200 would process rows 1-201, then 201-401 and so on. Based on Google Sheets API limits documentation, it is possible to send up to 300 requests per minute, but each individual request has to be processed under 180 seconds, otherwise the request returns a timeout error. In regards to this information, consider network speed and number of columns of the google sheet when deciding a batch_size value. Default value should cover most of the cases, but if a google sheet has over 100,000 records or more, consider increasing batch_size value. | | -| `names_conversion` | *Optional[bool]* | :heavy_minus_sign: | Enables the conversion of column names to a standardized, SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this option if your destination is SQL-based. | | -| `source_type` | [models.SourceGoogleSheetsGoogleSheets](../models/sourcegooglesheetsgooglesheets.md) | :heavy_check_mark: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `credentials` | [models.SourceGoogleSheetsAuthentication](../models/sourcegooglesheetsauthentication.md) | :heavy_check_mark: | Credentials for connecting to the Google Sheets API | | +| `spreadsheet_id` | *str* | :heavy_check_mark: | Enter the link to the Google spreadsheet you want to sync. To copy the link, click the 'Share' button in the top-right corner of the spreadsheet, then click 'Copy link'. | https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG-arw2xy4HR3D-dwUb/edit | +| `allow_leading_numbers` | *Optional[bool]* | :heavy_minus_sign: | Allows column names to start with numbers. Example: "50th Percentile" → "50_th_percentile" This option will only work if "Convert Column Names to SQL-Compliant Format (names_conversion)" is enabled. | | +| `batch_size` | *Optional[int]* | :heavy_minus_sign: | Default value is 1000000. An integer representing row batch size for each sent request to Google Sheets API. 
Row batch size means how many rows are processed from the google sheet, for example default value 1000000 would process rows 2-1000002, then 1000003-2000003 and so on. Based on Google Sheets API limits documentation, it is possible to send up to 300 requests per minute, but each individual request has to be processed under 180 seconds, otherwise the request returns a timeout error. In regards to this information, consider network speed and number of columns of the google sheet when deciding a batch_size value. | | +| `combine_letter_number_pairs` | *Optional[bool]* | :heavy_minus_sign: | Combines adjacent letters and numbers. Example: "Q3 2023" → "q3_2023" This option will only work if "Convert Column Names to SQL-Compliant Format (names_conversion)" is enabled. | | +| `combine_number_word_pairs` | *Optional[bool]* | :heavy_minus_sign: | Combines adjacent numbers and words. Example: "50th Percentile?" → "_50th_percentile_" This option will only work if "Convert Column Names to SQL-Compliant Format (names_conversion)" is enabled. | | +| `names_conversion` | *Optional[bool]* | :heavy_minus_sign: | Converts column names to a SQL-compliant format (snake_case, lowercase, etc). If enabled, you can further customize the sanitization using the options below. | | +| `remove_leading_trailing_underscores` | *Optional[bool]* | :heavy_minus_sign: | Removes leading and trailing underscores from column names. Does not remove leading underscores from column names that start with a number. Example: "50th Percentile? "→ "_50_th_percentile" This option will only work if "Convert Column Names to SQL-Compliant Format (names_conversion)" is enabled. | | +| `remove_special_characters` | *Optional[bool]* | :heavy_minus_sign: | Removes all special characters from column names. Example: "Example ID*" → "example_id" This option will only work if "Convert Column Names to SQL-Compliant Format (names_conversion)" is enabled. | | +| `source_type` | [models.SourceGoogleSheetsGoogleSheets](../models/sourcegooglesheetsgooglesheets.md) | :heavy_check_mark: | N/A | | +| `stream_name_overrides` | List[[models.StreamNameOverrides](../models/streamnameoverrides.md)] | :heavy_minus_sign: | **Overridden streams will default to Sync Mode: Full Refresh (Append), which does not support primary keys. If you want to use primary keys and deduplication, update the sync mode to "Full Refresh \| Overwrite + Deduped" in your connection settings.**
Allows you to rename streams (Google Sheet tab names) as they appear in Airbyte.
Each item should be an object with a `source_stream_name` (the exact name of the sheet/tab in your spreadsheet) and a `custom_stream_name` (the name you want it to appear as in Airbyte and the destination).
If a `source_stream_name` is not found in your spreadsheet, it will be ignored and the default name will be used. This feature only affects stream (sheet/tab) names, not field/column names.
If you want to rename fields or column names, you can do so using the Airbyte Mappings feature after your connection is created. See the Airbyte documentation for more details on how to use Mappings.
Examples:
- To rename a sheet called "Sheet1" to "sales_data", and "2024 Q1" to "q1_2024":
[
{ "source_stream_name": "Sheet1", "custom_stream_name": "sales_data" },
{ "source_stream_name": "2024 Q1", "custom_stream_name": "q1_2024" }
]
- If you do not wish to rename any streams, leave this blank. | | \ No newline at end of file diff --git a/docs/models/sourcegreythr.md b/docs/models/sourcegreythr.md new file mode 100644 index 00000000..c3defee5 --- /dev/null +++ b/docs/models/sourcegreythr.md @@ -0,0 +1,12 @@ +# SourceGreythr + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------- | -------------------------------------- | -------------------------------------- | -------------------------------------- | +| `base_url` | *str* | :heavy_check_mark: | https://api.greythr.com | +| `domain` | *str* | :heavy_check_mark: | Your GreytHR Host URL | +| `username` | *str* | :heavy_check_mark: | N/A | +| `password` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `source_type` | [models.Greythr](../models/greythr.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceharness.md b/docs/models/sourceharness.md new file mode 100644 index 00000000..9875cd73 --- /dev/null +++ b/docs/models/sourceharness.md @@ -0,0 +1,11 @@ +# SourceHarness + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------ | ------------------------------------------ | ------------------------------------------ | ------------------------------------------ | ------------------------------------------ | +| `account_id` | *str* | :heavy_check_mark: | Harness Account ID | | +| `api_key` | *str* | :heavy_check_mark: | N/A | | +| `api_url` | *Optional[str]* | :heavy_minus_sign: | The API URL for fetching data from Harness | https://my-harness-server.example.com | +| `source_type` | [models.Harness](../models/harness.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourcehellobaton.md b/docs/models/sourcehellobaton.md new file mode 100644 index 00000000..55703a31 --- /dev/null +++ b/docs/models/sourcehellobaton.md @@ -0,0 +1,10 @@ +# SourceHellobaton + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------- | ------------------------------------------------------- | ------------------------------------------------------- | ------------------------------------------------------- | ------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | authentication key required to access the api endpoints | | +| `company` | *str* | :heavy_check_mark: | Company name that generates your base api url | google | +| `source_type` | [models.Hellobaton](../models/hellobaton.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourcehelpscout.md b/docs/models/sourcehelpscout.md new file mode 100644 index 00000000..1415ad74 --- /dev/null +++ b/docs/models/sourcehelpscout.md @@ -0,0 +1,11 @@ +# SourceHelpScout + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `client_id` | *str* | :heavy_check_mark: | N/A | +| `client_secret` | *str* | :heavy_check_mark: | N/A | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.HelpScout](../models/helpscout.md) | :heavy_check_mark: | N/A | \ No 
newline at end of file diff --git a/docs/models/sourcehoorayhr.md b/docs/models/sourcehoorayhr.md new file mode 100644 index 00000000..c5cc0a87 --- /dev/null +++ b/docs/models/sourcehoorayhr.md @@ -0,0 +1,10 @@ +# SourceHoorayhr + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | +| `hoorayhrpassword` | *str* | :heavy_check_mark: | N/A | +| `hoorayhrusername` | *str* | :heavy_check_mark: | N/A | +| `source_type` | [models.Hoorayhr](../models/hoorayhr.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcehubspot.md b/docs/models/sourcehubspot.md index ff9137c4..a5b50585 100644 --- a/docs/models/sourcehubspot.md +++ b/docs/models/sourcehubspot.md @@ -7,5 +7,6 @@ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `credentials` | [models.SourceHubspotAuthentication](../models/sourcehubspotauthentication.md) | :heavy_check_mark: | Choose how to authenticate to HubSpot. | | | `enable_experimental_streams` | *Optional[bool]* | :heavy_minus_sign: | If enabled then experimental streams become available for sync. | | +| `num_worker` | *Optional[int]* | :heavy_minus_sign: | The number of worker threads to use for the sync. | 1 | | `source_type` | [models.SourceHubspotHubspot](../models/sourcehubspothubspot.md) | :heavy_check_mark: | N/A | | | `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. If not set, "2006-06-01T00:00:00Z" (Hubspot creation date) will be used as start date. It's recommended to provide relevant to your data start date value to optimize synchronization. 
| 2017-01-25T00:00:00Z | \ No newline at end of file diff --git a/docs/models/sourcehuggingfacedatasets.md b/docs/models/sourcehuggingfacedatasets.md new file mode 100644 index 00000000..2201e445 --- /dev/null +++ b/docs/models/sourcehuggingfacedatasets.md @@ -0,0 +1,11 @@ +# SourceHuggingFaceDatasets + + +## Fields + +| Field | Type | Required | Description | +| ----------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `dataset_name` | *str* | :heavy_check_mark: | N/A | +| `dataset_splits` | List[*Any*] | :heavy_minus_sign: | Splits to import. Will import all of them if nothing is provided (see https://huggingface.co/docs/dataset-viewer/en/configs_and_splits for more details) | +| `dataset_subsets` | List[*Any*] | :heavy_minus_sign: | Dataset Subsets to import. Will import all of them if nothing is provided (see https://huggingface.co/docs/dataset-viewer/en/configs_and_splits for more details) | +| `source_type` | [models.HuggingFaceDatasets](../models/huggingfacedatasets.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcehuntr.md b/docs/models/sourcehuntr.md new file mode 100644 index 00000000..1c19fea2 --- /dev/null +++ b/docs/models/sourcehuntr.md @@ -0,0 +1,9 @@ +# SourceHuntr + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------- | ---------------------------------- | ---------------------------------- | ---------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `source_type` | [models.Huntr](../models/huntr.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceimagga.md b/docs/models/sourceimagga.md new file mode 100644 index 00000000..6cc44654 --- /dev/null +++ b/docs/models/sourceimagga.md @@ -0,0 +1,11 @@ +# SourceImagga + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Your Imagga API key, available in your Imagga dashboard. Could be found at `https://imagga.com/profile/dashboard` | +| `api_secret` | *str* | :heavy_check_mark: | Your Imagga API secret, available in your Imagga dashboard. 
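Aside: for the new SourceHuggingFaceDatasets model above, a hedged sketch of how it might be constructed (same assumptions: keyword-argument dataclasses, const-defaulted `source_type`; the dataset names are placeholders):

```python
from airbyte_api import models

hf_config = models.SourceHuggingFaceDatasets(
    dataset_name="wikitext",                 # placeholder dataset
    dataset_splits=["train", "validation"],  # omit to import every split
    dataset_subsets=["wikitext-2-raw-v1"],   # omit to import every subset
)
```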
Could be found at `https://imagga.com/profile/dashboard` | +| `img_for_detection` | *Optional[str]* | :heavy_minus_sign: | An image for detection endpoints | +| `source_type` | [models.Imagga](../models/imagga.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceinsightful.md b/docs/models/sourceinsightful.md new file mode 100644 index 00000000..7a100e62 --- /dev/null +++ b/docs/models/sourceinsightful.md @@ -0,0 +1,10 @@ +# SourceInsightful + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `api_token` | *str* | :heavy_check_mark: | Your API token for accessing the Insightful API. Generate it by logging in as an Admin to your organization's account, navigating to the API page, and creating a new token. Note that this token will only be shown once, so store it securely. 
| +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.Insightful](../models/insightful.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceinstagram.md b/docs/models/sourceinstagram.md index 94c89707..7dd146d7 100644 --- a/docs/models/sourceinstagram.md +++ b/docs/models/sourceinstagram.md @@ -6,5 +6,8 @@ | Field | Type | Required | Description | Example | | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `access_token` | *str* | :heavy_check_mark: | The value of the access token generated with instagram_basic, instagram_manage_insights, pages_show_list, pages_read_engagement, Instagram Public Content Access permissions. See the docs for more information | | +| `client_id` | *Optional[str]* | :heavy_minus_sign: | The Client ID for your Oauth application | | +| `client_secret` | *Optional[str]* | :heavy_minus_sign: | The Client Secret for your Oauth application | | +| `num_workers` | *Optional[int]* | :heavy_minus_sign: | The number of worker threads to use for the sync. | 1 | | `source_type` | [models.SourceInstagramInstagram](../models/sourceinstagraminstagram.md) | :heavy_check_mark: | N/A | | | `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date from which you'd like to replicate data for User Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. If left blank, the start date will be set to 2 years before the present date. | 2017-01-25T00:00:00Z | \ No newline at end of file diff --git a/docs/models/sourceintercom.md b/docs/models/sourceintercom.md index 67f777cc..0bf562f3 100644 --- a/docs/models/sourceintercom.md +++ b/docs/models/sourceintercom.md @@ -11,4 +11,4 @@ | `client_id` | *Optional[str]* | :heavy_minus_sign: | Client Id for your Intercom application. 
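Aside: the SourceInstagram hunk above introduces optional `client_id`, `client_secret`, and `num_workers` fields. A sketch under the same assumptions (keyword-argument models, const-defaulted `source_type`, dummy values):

```python
from datetime import date

from airbyte_api import models

instagram_config = models.SourceInstagram(
    access_token="IGQV...",         # placeholder token
    client_id="my-app-id",          # new: OAuth application Client ID
    client_secret="my-app-secret",  # new: OAuth application Client Secret
    num_workers=2,                  # new: worker threads for the sync
    start_date=date(2023, 1, 1),
)
```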
| | | `client_secret` | *Optional[str]* | :heavy_minus_sign: | Client Secret for your Intercom application. | | | `lookback_window` | *Optional[int]* | :heavy_minus_sign: | The number of days to shift the state value backward for record sync | 60 | -| `source_type` | [models.SourceIntercomIntercom](../models/sourceintercomintercom.md) | :heavy_check_mark: | N/A | | \ No newline at end of file +| `source_type` | [models.Intercom](../models/intercom.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourceintruder.md b/docs/models/sourceintruder.md new file mode 100644 index 00000000..eb8fb1c3 --- /dev/null +++ b/docs/models/sourceintruder.md @@ -0,0 +1,9 @@ +# SourceIntruder + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------- | +| `access_token` | *str* | :heavy_check_mark: | Your API Access token. See here. | +| `source_type` | [models.Intruder](../models/intruder.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcejamfpro.md b/docs/models/sourcejamfpro.md new file mode 100644 index 00000000..e5b96aed --- /dev/null +++ b/docs/models/sourcejamfpro.md @@ -0,0 +1,11 @@ +# SourceJamfPro + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------ | ------------------------------------------------ | ------------------------------------------------ | ------------------------------------------------ | +| `subdomain` | *str* | :heavy_check_mark: | The unique subdomain for your Jamf Pro instance. | +| `username` | *str* | :heavy_check_mark: | N/A | +| `password` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `source_type` | [models.JamfPro](../models/jamfpro.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcejira.md b/docs/models/sourcejira.md index 51327866..7a38962f 100644 --- a/docs/models/sourcejira.md +++ b/docs/models/sourcejira.md @@ -8,7 +8,6 @@ | `api_token` | *str* | :heavy_check_mark: | Jira API Token. See the docs for more information on how to generate this key. API Token is used for Authorization to your account by BasicAuth. | | | `domain` | *str* | :heavy_check_mark: | The Domain for your Jira account, e.g. airbyteio.atlassian.net, airbyteio.jira.com, jira.your-domain.com | .atlassian.net | | `email` | *str* | :heavy_check_mark: | The user email for your Jira account which you used to generate the API token. This field is used for Authorization to your account by BasicAuth. | | -| `enable_experimental_streams` | *Optional[bool]* | :heavy_minus_sign: | Allow the use of experimental streams which rely on undocumented Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables for more info. | | | `lookback_window_minutes` | *Optional[int]* | :heavy_minus_sign: | When set to N, the connector will always refresh resources created within the past N minutes. By default, updated objects that are not newly created are not incrementally synced. | 60 | | `num_workers` | *Optional[int]* | :heavy_minus_sign: | The number of worker threads to use for the sync. 
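Aside: note that the SourceJira hunk above removes `enable_experimental_streams`, a breaking change for configs that set it; since the generated models are plain dataclasses, passing the removed keyword would likely fail at construction. A sketch of a post-change config (illustrative values):

```python
from airbyte_api import models

jira_config = models.SourceJira(
    api_token="atlassian-api-token",  # placeholder
    domain="airbyteio.atlassian.net",
    email="user@example.com",
    lookback_window_minutes=60,       # always refresh recently created resources
    num_workers=4,                    # worker threads for the sync
    projects=["PROJ1"],               # empty/omitted means all projects
    # enable_experimental_streams=True  # removed: no longer accepted
)
```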
| 1 | | `projects` | List[*str*] | :heavy_minus_sign: | List of Jira project keys to replicate data for, or leave it empty if you want to replicate data for all projects. | PROJ1 | diff --git a/docs/models/sourcejotformapiendpoint.md b/docs/models/sourcejotformapiendpoint.md index b0f9f3b6..087ed469 100644 --- a/docs/models/sourcejotformapiendpoint.md +++ b/docs/models/sourcejotformapiendpoint.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ------- | ------- | -| `BASIC` | basic | \ No newline at end of file +| Name | Value | +| ------------ | ------------ | +| `ENTERPRISE` | enterprise | \ No newline at end of file diff --git a/docs/models/sourcejotformschemasapiendpoint.md b/docs/models/sourcejotformschemasapiendpoint.md index ec704719..c500a077 100644 --- a/docs/models/sourcejotformschemasapiendpoint.md +++ b/docs/models/sourcejotformschemasapiendpoint.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ------------ | ------------ | -| `ENTERPRISE` | enterprise | \ No newline at end of file +| Name | Value | +| ------- | ------- | +| `BASIC` | basic | \ No newline at end of file diff --git a/docs/models/sourcejudgemereviews.md b/docs/models/sourcejudgemereviews.md new file mode 100644 index 00000000..d1436ecf --- /dev/null +++ b/docs/models/sourcejudgemereviews.md @@ -0,0 +1,11 @@ +# SourceJudgeMeReviews + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `shop_domain` | *str* | :heavy_check_mark: | example.myshopify.com | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.JudgeMeReviews](../models/judgemereviews.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcekeka.md b/docs/models/sourcekeka.md new file mode 100644 index 00000000..99b56437 --- /dev/null +++ b/docs/models/sourcekeka.md @@ -0,0 +1,13 @@ +# SourceKeka + + +## Fields + +| Field | Type | Required | Description | +| --------------------------------------------- | --------------------------------------------- | --------------------------------------------- | --------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `client_id` | *str* | :heavy_check_mark: | Your client identifier for authentication. | +| `client_secret` | *str* | :heavy_check_mark: | Your client secret for secure authentication. 
| +| `grant_type` | *str* | :heavy_check_mark: | N/A | +| `scope` | *str* | :heavy_check_mark: | N/A | +| `source_type` | [models.Keka](../models/keka.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceklaviyo.md b/docs/models/sourceklaviyo.md index f59c4fb4..967c9663 100644 --- a/docs/models/sourceklaviyo.md +++ b/docs/models/sourceklaviyo.md @@ -7,6 +7,6 @@ | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `api_key` | *str* | :heavy_check_mark: | Klaviyo API Key. See our docs if you need help finding this key. | | | `disable_fetching_predictive_analytics` | *Optional[bool]* | :heavy_minus_sign: | Certain streams like the profiles stream can retrieve predictive analytics data from Klaviyo's API. However, at high volume, this can lead to service availability issues on the API which can be improved by not fetching this field. WARNING: Enabling this setting will stop the "predictive_analytics" column from being populated in your downstream destination. | | -| `num_workers` | *Optional[int]* | :heavy_minus_sign: | The number of worker threads to use for the sync. The performance upper boundary is based on the limit of your Chargebee plan. More info about the rate limit plan tiers can be found on Chargebee's API docs. | 1 | +| `num_workers` | *Optional[int]* | :heavy_minus_sign: | The number of worker threads to use for the sync. The performance upper boundary is based on the limit of your Klaviyo plan. More info about the rate limit plan tiers can be found on Klaviyo's API docs. 
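Aside: the Klaviyo hunk above only corrects a copy-pasted description (Chargebee became Klaviyo); the field itself is unchanged. For completeness, a hedged sketch of tuning it (placeholder key, same assumptions as before):

```python
from airbyte_api import models

klaviyo_config = models.SourceKlaviyo(
    api_key="pk_...",                            # placeholder
    disable_fetching_predictive_analytics=True,  # reduce pressure on the API
    num_workers=3,                               # stay within the plan's rate-limit tier
)
```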
| 1 | | `source_type` | [models.Klaviyo](../models/klaviyo.md) | :heavy_check_mark: | N/A | | | `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. This field is optional - if not provided, all data will be replicated. | 2017-01-25T00:00:00Z | \ No newline at end of file diff --git a/docs/models/sourcelinear.md b/docs/models/sourcelinear.md new file mode 100644 index 00000000..ecb3a76b --- /dev/null +++ b/docs/models/sourcelinear.md @@ -0,0 +1,9 @@ +# SourceLinear + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------ | ------------------------------------ | ------------------------------------ | ------------------------------------ | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `source_type` | [models.Linear](../models/linear.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcelinkedinads.md b/docs/models/sourcelinkedinads.md index ceaafb86..461a804e 100644 --- a/docs/models/sourcelinkedinads.md +++ b/docs/models/sourcelinkedinads.md @@ -10,4 +10,5 @@ | `ad_analytics_reports` | List[[models.AdAnalyticsReportConfiguration](../models/adanalyticsreportconfiguration.md)] | :heavy_minus_sign: | N/A | | | `credentials` | [Optional[models.SourceLinkedinAdsAuthentication]](../models/sourcelinkedinadsauthentication.md) | :heavy_minus_sign: | N/A | | | `lookback_window` | *Optional[int]* | :heavy_minus_sign: | How far into the past to look for records. (in days) | | +| `num_workers` | *Optional[int]* | :heavy_minus_sign: | The number of workers to use for the connector. This is used to limit the number of concurrent requests to the LinkedIn Ads API. If not set, the default is 3 workers. | | | `source_type` | [models.SourceLinkedinAdsLinkedinAds](../models/sourcelinkedinadslinkedinads.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourcemailchimpapikey.md b/docs/models/sourcemailchimpapikey.md new file mode 100644 index 00000000..bd78312d --- /dev/null +++ b/docs/models/sourcemailchimpapikey.md @@ -0,0 +1,9 @@ +# SourceMailchimpAPIKey + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- | +| `apikey` | *str* | :heavy_check_mark: | Mailchimp API Key. See the docs for information on how to generate this key. 
| +| `auth_type` | [models.SourceMailchimpSchemasAuthType](../models/sourcemailchimpschemasauthtype.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcemailchimpauthentication.md b/docs/models/sourcemailchimpauthentication.md index 8c24beba..86cfb4a9 100644 --- a/docs/models/sourcemailchimpauthentication.md +++ b/docs/models/sourcemailchimpauthentication.md @@ -9,9 +9,9 @@ value: models.SourceMailchimpOAuth20 = /* values here */ ``` -### `models.APIKey` +### `models.SourceMailchimpAPIKey` ```python -value: models.APIKey = /* values here */ +value: models.SourceMailchimpAPIKey = /* values here */ ``` diff --git a/docs/models/sourceorbit.md b/docs/models/sourcemailersend.md similarity index 51% rename from docs/models/sourceorbit.md rename to docs/models/sourcemailersend.md index 26c4d84f..ac71a981 100644 --- a/docs/models/sourceorbit.md +++ b/docs/models/sourcemailersend.md @@ -1,11 +1,11 @@ -# SourceOrbit +# SourceMailersend ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------- | -| `api_token` | *str* | :heavy_check_mark: | Authorizes you to work with Orbit workspaces associated with the token. | -| `workspace` | *str* | :heavy_check_mark: | The unique name of the workspace that your API token is associated with. | -| `source_type` | [models.Orbit](../models/orbit.md) | :heavy_check_mark: | N/A | -| `start_date` | *Optional[str]* | :heavy_minus_sign: | Date in the format 2022-06-26. Only load members whose last activities are after this date. | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------- | +| `api_token` | *str* | :heavy_check_mark: | Your API Token. See here. | | +| `domain_id` | *str* | :heavy_check_mark: | The domain entity in mailersend | airbyte.com | +| `source_type` | [models.Mailersend](../models/mailersend.md) | :heavy_check_mark: | N/A | | +| `start_date` | *Optional[float]* | :heavy_minus_sign: | Timestamp is assumed to be UTC. 
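Aside: the rename above means callers that previously built `models.APIKey` for Mailchimp credentials would now construct `models.SourceMailchimpAPIKey`. A sketch (assuming `auth_type` carries a const default and `credentials` accepts either union member):

```python
from airbyte_api import models

mailchimp_config = models.SourceMailchimp(
    credentials=models.SourceMailchimpAPIKey(apikey="xxxx-us14"),  # placeholder key
)
```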
| 123131321 | \ No newline at end of file diff --git a/docs/models/sourcequickbooksoauth20.md b/docs/models/sourcemendeley.md similarity index 56% rename from docs/models/sourcequickbooksoauth20.md rename to docs/models/sourcemendeley.md index cd533c98..9dcf705e 100644 --- a/docs/models/sourcequickbooksoauth20.md +++ b/docs/models/sourcemendeley.md @@ -1,14 +1,14 @@ -# SourceQuickbooksOAuth20 +# SourceMendeley ## Fields -| Field | Type | Required | Description | -| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `access_token` | *str* | :heavy_check_mark: | Access token for making authenticated requests. | -| `client_id` | *str* | :heavy_check_mark: | Identifies which app is making the request. Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production. | -| `client_secret` | *str* | :heavy_check_mark: | Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production. | -| `realm_id` | *str* | :heavy_check_mark: | Labeled Company ID. The Make API Calls panel is populated with the realm id and the current access token. | -| `refresh_token` | *str* | :heavy_check_mark: | A token used when refreshing the access token. | -| `token_expiry_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | The date-time when the access token should be refreshed. | -| `auth_type` | [Optional[models.SourceQuickbooksAuthType]](../models/sourcequickbooksauthtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `client_id` | *str* | :heavy_check_mark: | Could be found at `https://dev.mendeley.com/myapps.html` | +| `client_refresh_token` | *str* | :heavy_check_mark: | Use cURL or Postman with the OAuth 2.0 Authorization tab. 
Set the Auth URL to https://api.mendeley.com/oauth/authorize, the Token URL to https://api.mendeley.com/oauth/token, and use all as the scope. | +| `client_secret` | *str* | :heavy_check_mark: | Could be found at `https://dev.mendeley.com/myapps.html` | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `name_for_institution` | *Optional[str]* | :heavy_minus_sign: | The name parameter for institutions search | +| `query_for_catalog` | *Optional[str]* | :heavy_minus_sign: | Query for catalog search | +| `source_type` | [models.Mendeley](../models/mendeley.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcemercadoads.md b/docs/models/sourcemercadoads.md new file mode 100644 index 00000000..0ef91af3 --- /dev/null +++ b/docs/models/sourcemercadoads.md @@ -0,0 +1,14 @@ +# SourceMercadoAds + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | +| `client_id` | *str* | :heavy_check_mark: | N/A | +| `client_refresh_token` | *str* | :heavy_check_mark: | N/A | +| `client_secret` | *str* | :heavy_check_mark: | N/A | +| `end_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_minus_sign: | Cannot exceed 90 days from current day for Product Ads | +| `lookback_days` | *Optional[float]* | :heavy_minus_sign: | N/A | +| `source_type` | [models.MercadoAds](../models/mercadoads.md) | :heavy_check_mark: | N/A | +| `start_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_minus_sign: | Cannot exceed 90 days from current day for Product Ads, and 90 days from "End Date" on Brand and Display Ads | \ No newline at end of file diff --git a/docs/models/sourcemerge.md b/docs/models/sourcemerge.md new file mode 100644 index 00000000..0ed33a94 --- /dev/null +++ b/docs/models/sourcemerge.md @@ -0,0 +1,11 @@ +# SourceMerge + + +## Fields + +| Field | Type | Required | Description | Example | +| ----------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- | +| `account_token` | *str* | :heavy_check_mark: | Link your other integrations with account credentials on accounts section to get account token (ref - https://app.merge.dev/linked-accounts/accounts) | | +| `api_token` | *str* | :heavy_check_mark: | API token can be seen at https://app.merge.dev/keys | | +| `start_date` | 
[date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | Date time filter for incremental filter, Specify which date to extract from. | 2022-03-01T00:00:00.000Z | +| `source_type` | [models.Merge](../models/merge.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourcemicrosoftsharepoint.md b/docs/models/sourcemicrosoftsharepoint.md index 99b797b3..264236a2 100644 --- a/docs/models/sourcemicrosoftsharepoint.md +++ b/docs/models/sourcemicrosoftsharepoint.md @@ -10,7 +10,9 @@ This class combines the authentication details with additional configuration for | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `credentials` | [models.SourceMicrosoftSharepointAuthentication](../models/sourcemicrosoftsharepointauthentication.md) | :heavy_check_mark: | Credentials for connecting to the One Drive API | | | `streams` | List[[models.SourceMicrosoftSharepointFileBasedStreamConfig](../models/sourcemicrosoftsharepointfilebasedstreamconfig.md)] | :heavy_check_mark: | Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table. | | +| `delivery_method` | [Optional[models.SourceMicrosoftSharepointDeliveryMethod]](../models/sourcemicrosoftsharepointdeliverymethod.md) | :heavy_minus_sign: | N/A | | | `folder_path` | *Optional[str]* | :heavy_minus_sign: | Path to a specific folder within the drives to search for files. Leave empty to search all folders of the drives. This does not apply to shared items. | | | `search_scope` | [Optional[models.SourceMicrosoftSharepointSearchScope]](../models/sourcemicrosoftsharepointsearchscope.md) | :heavy_minus_sign: | Specifies the location(s) to search for files. 
Valid options are 'ACCESSIBLE_DRIVES' for all SharePoint drives the user can access, 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to search both. | | +| `site_url` | *Optional[str]* | :heavy_minus_sign: | Url of SharePoint site to search for files. Leave empty to search in the main site. Use 'https://.sharepoint.com/sites/' to iterate over all sites. | | | `source_type` | [models.SourceMicrosoftSharepointMicrosoftSharepoint](../models/sourcemicrosoftsharepointmicrosoftsharepoint.md) | :heavy_check_mark: | N/A | | | `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated. | 2021-01-01T00:00:00.000000Z | \ No newline at end of file diff --git a/docs/models/sourcemicrosoftsharepointcopyrawfiles.md b/docs/models/sourcemicrosoftsharepointcopyrawfiles.md new file mode 100644 index 00000000..245be768 --- /dev/null +++ b/docs/models/sourcemicrosoftsharepointcopyrawfiles.md @@ -0,0 +1,11 @@ +# SourceMicrosoftSharepointCopyRawFiles + +Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `delivery_type` | [Optional[models.SourceMicrosoftSharepointSchemasDeliveryType]](../models/sourcemicrosoftsharepointschemasdeliverytype.md) | :heavy_minus_sign: | N/A | +| `preserve_directory_structure` | *Optional[bool]* | :heavy_minus_sign: | If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. 
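Aside: a hedged sketch of picking a delivery method through the new `SourceMicrosoftSharepointDeliveryMethod` union documented in the hunks that follow (assuming the `delivery_type` discriminators default to their const values):

```python
from airbyte_api import models

# Copy files byte-for-byte instead of parsing records:
raw_copy = models.SourceMicrosoftSharepointCopyRawFiles(
    preserve_directory_structure=True,  # keep subfolder paths at the destination
)

# Or the classic structured-records path:
replicate = models.SourceMicrosoftSharepointReplicateRecords()
```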
| \ No newline at end of file diff --git a/docs/models/sourcemicrosoftsharepointdeliverymethod.md b/docs/models/sourcemicrosoftsharepointdeliverymethod.md new file mode 100644 index 00000000..4c391fe8 --- /dev/null +++ b/docs/models/sourcemicrosoftsharepointdeliverymethod.md @@ -0,0 +1,17 @@ +# SourceMicrosoftSharepointDeliveryMethod + + +## Supported Types + +### `models.SourceMicrosoftSharepointReplicateRecords` + +```python +value: models.SourceMicrosoftSharepointReplicateRecords = /* values here */ +``` + +### `models.SourceMicrosoftSharepointCopyRawFiles` + +```python +value: models.SourceMicrosoftSharepointCopyRawFiles = /* values here */ +``` + diff --git a/docs/models/sourcemicrosoftsharepointdeliverytype.md b/docs/models/sourcemicrosoftsharepointdeliverytype.md new file mode 100644 index 00000000..701dfdfa --- /dev/null +++ b/docs/models/sourcemicrosoftsharepointdeliverytype.md @@ -0,0 +1,8 @@ +# SourceMicrosoftSharepointDeliveryType + + +## Values + +| Name | Value | +| ---------------------- | ---------------------- | +| `USE_RECORDS_TRANSFER` | use_records_transfer | \ No newline at end of file diff --git a/docs/models/sourcemicrosoftsharepointreplicaterecords.md b/docs/models/sourcemicrosoftsharepointreplicaterecords.md new file mode 100644 index 00000000..167c2400 --- /dev/null +++ b/docs/models/sourcemicrosoftsharepointreplicaterecords.md @@ -0,0 +1,10 @@ +# SourceMicrosoftSharepointReplicateRecords + +Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | +| `delivery_type` | [Optional[models.SourceMicrosoftSharepointDeliveryType]](../models/sourcemicrosoftsharepointdeliverytype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcemicrosoftsharepointschemasdeliverytype.md b/docs/models/sourcemicrosoftsharepointschemasdeliverytype.md new file mode 100644 index 00000000..5a77f748 --- /dev/null +++ b/docs/models/sourcemicrosoftsharepointschemasdeliverytype.md @@ -0,0 +1,8 @@ +# SourceMicrosoftSharepointSchemasDeliveryType + + +## Values + +| Name | Value | +| ------------------- | ------------------- | +| `USE_FILE_TRANSFER` | use_file_transfer | \ No newline at end of file diff --git a/docs/models/sourcemixpanel.md b/docs/models/sourcemixpanel.md index 2df08160..6e5b03c9 100644 --- a/docs/models/sourcemixpanel.md +++ b/docs/models/sourcemixpanel.md @@ -9,6 +9,7 @@ | `attribution_window` | *Optional[int]* | :heavy_minus_sign: | A period of time for attributing results to ads and the lookback period after those actions occur during which ad results are counted. Default attribution window is 5 days. (This value should be non-negative integer) | | | `date_window_size` | *Optional[int]* | :heavy_minus_sign: | Defines window size in days, that used to slice through data. 
You can reduce it, if amount of data in each window is too big for your environment. (This value should be positive integer) | | | `end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date in the format YYYY-MM-DD. Any data after this date will not be replicated. Left empty to always sync to most recent date | 2021-11-16 | +| `export_lookback_window` | *Optional[int]* | :heavy_minus_sign: | The number of seconds to look back from the last synced timestamp during incremental syncs of the Export stream. This ensures no data is missed due to delays in event recording. Default is 0 seconds. Must be a non-negative integer. | | | `page_size` | *Optional[int]* | :heavy_minus_sign: | The number of records to fetch per request for the engage stream. Default is 1000. If you are experiencing long sync times with this stream, try increasing this value. | | | `project_timezone` | *Optional[str]* | :heavy_minus_sign: | Time zone in which integer date times are stored. The project timezone may be found in the project settings in the Mixpanel console. | US/Pacific | | `region` | [Optional[models.SourceMixpanelRegion]](../models/sourcemixpanelregion.md) | :heavy_minus_sign: | The region of mixpanel domain instance either US or EU. | | diff --git a/docs/models/sourcemode.md b/docs/models/sourcemode.md index b65537b7..89dfebfe 100644 --- a/docs/models/sourcemode.md +++ b/docs/models/sourcemode.md @@ -8,4 +8,4 @@ | `api_secret` | *str* | :heavy_check_mark: | API secret to use as the password for Basic Authentication. | | `api_token` | *str* | :heavy_check_mark: | API token to use as the username for Basic Authentication. | | `workspace` | *str* | :heavy_check_mark: | N/A | -| `source_type` | [models.Mode](../models/mode.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| `source_type` | [models.SourceModeMode](../models/sourcemodemode.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcemodemode.md b/docs/models/sourcemodemode.md new file mode 100644 index 00000000..27c84965 --- /dev/null +++ b/docs/models/sourcemodemode.md @@ -0,0 +1,8 @@ +# SourceModeMode + + +## Values + +| Name | Value | +| ------ | ------ | +| `MODE` | mode | \ No newline at end of file diff --git a/docs/models/sourcemonday.md b/docs/models/sourcemonday.md index 0bf94c84..2f0af72f 100644 --- a/docs/models/sourcemonday.md +++ b/docs/models/sourcemonday.md @@ -3,7 +3,9 @@ ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | -| `credentials` | [Optional[models.SourceMondayAuthorizationMethod]](../models/sourcemondayauthorizationmethod.md) | :heavy_minus_sign: | N/A | -| `source_type` | [models.SourceMondayMonday](../models/sourcemondaymonday.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `board_ids` | List[*int*] | :heavy_minus_sign: | The IDs of the boards that the Items and Boards streams will extract records from. When left empty, streams will extract records from all boards that exist within the account. | | +| `credentials` | [Optional[models.SourceMondayAuthorizationMethod]](../models/sourcemondayauthorizationmethod.md) | :heavy_minus_sign: | N/A | | +| `num_workers` | *Optional[int]* | :heavy_minus_sign: | The number of worker threads to use for the sync. | 1 | +| `source_type` | [models.SourceMondayMonday](../models/sourcemondaymonday.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourcemongodbv2.md b/docs/models/sourcemongodbv2.md index b0002b28..9fcebf3d 100644 --- a/docs/models/sourcemongodbv2.md +++ b/docs/models/sourcemongodbv2.md @@ -7,6 +7,7 @@ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | | `database_config` | [models.ClusterType](../models/clustertype.md) | :heavy_check_mark: | Configures the MongoDB cluster 
type. | | `discover_sample_size` | *Optional[int]* | :heavy_minus_sign: | The maximum number of documents to sample when attempting to discover the unique fields for a collection. | +| `discover_timeout_seconds` | *Optional[int]* | :heavy_minus_sign: | The amount of time the connector will wait when it discovers a document. Defaults to 600 seconds. Valid range: 5 seconds to 1200 seconds. | | `initial_load_timeout_hours` | *Optional[int]* | :heavy_minus_sign: | The amount of time an initial load is allowed to continue for before catching up on CDC logs. | | `initial_waiting_seconds` | *Optional[int]* | :heavy_minus_sign: | The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. | | `invalid_cdc_cursor_position_behavior` | [Optional[models.InvalidCDCPositionBehaviorAdvanced]](../models/invalidcdcpositionbehavioradvanced.md) | :heavy_minus_sign: | Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value into the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss. | diff --git a/docs/models/sourcemongodbv2clustertype.md b/docs/models/sourcemongodbv2clustertype.md index 66253f7e..528b24ef 100644 --- a/docs/models/sourcemongodbv2clustertype.md +++ b/docs/models/sourcemongodbv2clustertype.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ------------------- | ------------------- | -| `ATLAS_REPLICA_SET` | ATLAS_REPLICA_SET | \ No newline at end of file +| Name | Value | +| -------------------------- | -------------------------- | +| `SELF_MANAGED_REPLICA_SET` | SELF_MANAGED_REPLICA_SET | \ No newline at end of file diff --git a/docs/models/sourcemongodbv2schemasclustertype.md b/docs/models/sourcemongodbv2schemasclustertype.md index 91180351..0777b143 100644 --- a/docs/models/sourcemongodbv2schemasclustertype.md +++ b/docs/models/sourcemongodbv2schemasclustertype.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| -------------------------- | -------------------------- | -| `SELF_MANAGED_REPLICA_SET` | SELF_MANAGED_REPLICA_SET | \ No newline at end of file +| Name | Value | +| ------------------- | ------------------- | +| `ATLAS_REPLICA_SET` | ATLAS_REPLICA_SET | \ No newline at end of file diff --git a/docs/models/sourcemssqlencryptedtrustservercertificate.md b/docs/models/sourcemssqlencryptedtrustservercertificate.md index 34cdf11a..11a378ec 100644 --- a/docs/models/sourcemssqlencryptedtrustservercertificate.md +++ b/docs/models/sourcemssqlencryptedtrustservercertificate.md @@ -5,6 +5,6 @@ Use the certificate provided by the server without verification. 
(For testing pu ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | -| `ssl_method` | [models.SourceMssqlSchemasSslMethodSslMethod](../models/sourcemssqlschemassslmethodsslmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | +| `ssl_method` | [models.SourceMssqlSchemasSslMethod](../models/sourcemssqlschemassslmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcemssqlencryptedverifycertificate.md b/docs/models/sourcemssqlencryptedverifycertificate.md index 72420250..99614908 100644 --- a/docs/models/sourcemssqlencryptedverifycertificate.md +++ b/docs/models/sourcemssqlencryptedverifycertificate.md @@ -9,4 +9,4 @@ Verify and use the certificate provided by the server. | --------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | | `certificate` | *Optional[str]* | :heavy_minus_sign: | certificate of the server, or of the CA that signed the server certificate | | `host_name_in_certificate` | *Optional[str]* | :heavy_minus_sign: | Specifies the host name of the server. The value of this property must match the subject property of the certificate. 
| -| `ssl_method` | [models.SourceMssqlSchemasSSLMethodSSLMethodSSLMethod](../models/sourcemssqlschemassslmethodsslmethodsslmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| `ssl_method` | [models.SourceMssqlSchemasSslMethodSslMethod](../models/sourcemssqlschemassslmethodsslmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcemssqlschemassslmethod.md b/docs/models/sourcemssqlschemassslmethod.md index 7ee95623..679c130e 100644 --- a/docs/models/sourcemssqlschemassslmethod.md +++ b/docs/models/sourcemssqlschemassslmethod.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ------------- | ------------- | -| `UNENCRYPTED` | unencrypted | \ No newline at end of file +| Name | Value | +| ------------------------------------ | ------------------------------------ | +| `ENCRYPTED_TRUST_SERVER_CERTIFICATE` | encrypted_trust_server_certificate | \ No newline at end of file diff --git a/docs/models/sourcemssqlschemassslmethodsslmethod.md b/docs/models/sourcemssqlschemassslmethodsslmethod.md index 10138e1f..8f8988fa 100644 --- a/docs/models/sourcemssqlschemassslmethodsslmethod.md +++ b/docs/models/sourcemssqlschemassslmethodsslmethod.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ------------------------------------ | ------------------------------------ | -| `ENCRYPTED_TRUST_SERVER_CERTIFICATE` | encrypted_trust_server_certificate | \ No newline at end of file +| Name | Value | +| ------------------------------ | ------------------------------ | +| `ENCRYPTED_VERIFY_CERTIFICATE` | encrypted_verify_certificate | \ No newline at end of file diff --git a/docs/models/sourcemssqlschemassslmethodsslmethodsslmethod.md b/docs/models/sourcemssqlschemassslmethodsslmethodsslmethod.md index b97ea287..a979631b 100644 --- a/docs/models/sourcemssqlschemassslmethodsslmethodsslmethod.md +++ b/docs/models/sourcemssqlschemassslmethodsslmethodsslmethod.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ------------------------------ | ------------------------------ | -| `ENCRYPTED_VERIFY_CERTIFICATE` | encrypted_verify_certificate | \ No newline at end of file +| Name | Value | +| ------------- | ------------- | +| `UNENCRYPTED` | unencrypted | \ No newline at end of file diff --git a/docs/models/sourcemssqlunencrypted.md b/docs/models/sourcemssqlunencrypted.md index bbd49faa..6db7297f 100644 --- a/docs/models/sourcemssqlunencrypted.md +++ b/docs/models/sourcemssqlunencrypted.md @@ -5,6 +5,6 @@ Data transfer will not be encrypted. 
## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | -| `ssl_method` | [models.SourceMssqlSchemasSslMethod](../models/sourcemssqlschemassslmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | +| `ssl_method` | [models.SourceMssqlSchemasSSLMethodSSLMethodSSLMethod](../models/sourcemssqlschemassslmethodsslmethodsslmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcemysql.md b/docs/models/sourcemysql.md index f4fdfbd3..3e188ec2 100644 --- a/docs/models/sourcemysql.md +++ b/docs/models/sourcemysql.md @@ -3,16 +3,18 @@ ## Fields -| Field | Type | Required | Description | Example | -| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `database` | *str* | :heavy_check_mark: | The database name. | | -| `host` | *str* | :heavy_check_mark: | The host name of the database. | | -| `replication_method` | [models.SourceMysqlUpdateMethod](../models/sourcemysqlupdatemethod.md) | :heavy_check_mark: | Configures how data is extracted from the database. 
| | -| `username` | *str* | :heavy_check_mark: | The username which is used to access the database. | | -| `jdbc_url_params` | *Optional[str]* | :heavy_minus_sign: | Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters. | | -| `password` | *Optional[str]* | :heavy_minus_sign: | The password associated with the username. | | -| `port` | *Optional[int]* | :heavy_minus_sign: | The port to connect to. | 3306 | -| `source_type` | [models.SourceMysqlMysql](../models/sourcemysqlmysql.md) | :heavy_check_mark: | N/A | | -| `ssl` | *Optional[bool]* | :heavy_minus_sign: | Encrypt data using SSL. | | -| `ssl_mode` | [Optional[models.SourceMysqlSSLModes]](../models/sourcemysqlsslmodes.md) | :heavy_minus_sign: | SSL connection modes. Read more in the docs. | | -| `tunnel_method` | [Optional[models.SourceMysqlSSHTunnelMethod]](../models/sourcemysqlsshtunnelmethod.md) | :heavy_minus_sign: | Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. | | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `database` | *str* | :heavy_check_mark: | The database name. | +| `host` | *str* | :heavy_check_mark: | Hostname of the database. | +| `replication_method` | [models.SourceMysqlUpdateMethod](../models/sourcemysqlupdatemethod.md) | :heavy_check_mark: | Configures how data is extracted from the database. | +| `username` | *str* | :heavy_check_mark: | The username which is used to access the database. | +| `check_privileges` | *Optional[bool]* | :heavy_minus_sign: | When this feature is enabled, during schema discovery the connector will query each table or view individually to check access privileges and inaccessible tables, views, or columns therein will be removed. In large schemas, this might cause schema discovery to take too long, in which case it might be advisable to disable this feature. 
| +| `checkpoint_target_interval_seconds` | *Optional[int]* | :heavy_minus_sign: | How often (in seconds) a stream should checkpoint, when possible. | +| `jdbc_url_params` | *Optional[str]* | :heavy_minus_sign: | Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). | +| `max_db_connections` | *Optional[int]* | :heavy_minus_sign: | Maximum number of concurrent queries to the database. Leave empty to let Airbyte optimize performance. | +| `password` | *Optional[str]* | :heavy_minus_sign: | The password associated with the username. | +| `port` | *Optional[int]* | :heavy_minus_sign: | Port of the database. | +| `source_type` | [models.SourceMysqlMysql](../models/sourcemysqlmysql.md) | :heavy_check_mark: | N/A | +| `ssl_mode` | [Optional[models.SourceMysqlEncryption]](../models/sourcemysqlencryption.md) | :heavy_minus_sign: | The encryption method which is used when communicating with the database. | +| `tunnel_method` | [Optional[models.SourceMysqlSSHTunnelMethod]](../models/sourcemysqlsshtunnelmethod.md) | :heavy_minus_sign: | Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. | \ No newline at end of file diff --git a/docs/models/sourcemysqlsslmodes.md b/docs/models/sourcemysqlencryption.md similarity index 54% rename from docs/models/sourcemysqlsslmodes.md rename to docs/models/sourcemysqlencryption.md index afb302c8..cb686be9 100644 --- a/docs/models/sourcemysqlsslmodes.md +++ b/docs/models/sourcemysqlencryption.md @@ -1,6 +1,6 @@ -# SourceMysqlSSLModes +# SourceMysqlEncryption -SSL connection modes. Read more in the docs. +The encryption method which is used when communicating with the database. ## Supported Types @@ -17,10 +17,10 @@ value: models.Preferred = /* values here */ value: models.Required = /* values here */ ``` -### `models.SourceMysqlVerifyCA` +### `models.SourceMysqlVerifyCa` ```python -value: models.SourceMysqlVerifyCA = /* values here */ +value: models.SourceMysqlVerifyCa = /* values here */ ``` ### `models.VerifyIdentity` diff --git a/docs/models/sourcemysqlinvalidcdcpositionbehavioradvanced.md b/docs/models/sourcemysqlinvalidcdcpositionbehavioradvanced.md index 5baf754b..5f5f3a9d 100644 --- a/docs/models/sourcemysqlinvalidcdcpositionbehavioradvanced.md +++ b/docs/models/sourcemysqlinvalidcdcpositionbehavioradvanced.md @@ -1,6 +1,6 @@ # SourceMysqlInvalidCDCPositionBehaviorAdvanced -Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value into the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss. +Determines whether Airbyte should fail or re-sync data in case of a stale/invalid cursor value in the mined logs. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss. 
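A minimal usage sketch, assuming the SDK's standard generated import path and that the enum members mirror the `Fail sync` / `Re-sync data` values of the Oracle Enterprise counterpart later in this changeset:

```python
from airbyte_api import models

# Assumed member name (mirrors SourceOracleEnterpriseInvalidCDCPositionBehaviorAdvanced):
# fail the sync on a stale/invalid binlog cursor instead of triggering a
# potentially costly automatic re-sync.
behavior = models.SourceMysqlInvalidCDCPositionBehaviorAdvanced.FAIL_SYNC
```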
## Values diff --git a/docs/models/sourcemysqlmethod.md b/docs/models/sourcemysqlmethod.md index 73f95f02..8d9a4b56 100644 --- a/docs/models/sourcemysqlmethod.md +++ b/docs/models/sourcemysqlmethod.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ----- | ----- | -| `CDC` | CDC | \ No newline at end of file +| Name | Value | +| ---------- | ---------- | +| `STANDARD` | STANDARD | \ No newline at end of file diff --git a/docs/models/sourcemysqlnotunnel.md b/docs/models/sourcemysqlnotunnel.md index 24380a31..d82b4dc3 100644 --- a/docs/models/sourcemysqlnotunnel.md +++ b/docs/models/sourcemysqlnotunnel.md @@ -1,8 +1,11 @@ # SourceMysqlNoTunnel +No ssh tunnel needed to connect to database + ## Fields -| Field | Type | Required | Description | -| ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | -| `tunnel_method` | [models.SourceMysqlTunnelMethod](../models/sourcemysqltunnelmethod.md) | :heavy_check_mark: | No ssh tunnel needed to connect to database | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `tunnel_method` | [Optional[models.SourceMysqlTunnelMethod]](../models/sourcemysqltunnelmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcemysqlpasswordauthentication.md b/docs/models/sourcemysqlpasswordauthentication.md index 183ab030..15efae91 100644 --- a/docs/models/sourcemysqlpasswordauthentication.md +++ b/docs/models/sourcemysqlpasswordauthentication.md @@ -1,12 +1,15 @@ # SourceMysqlPasswordAuthentication +Connect through a jump server tunnel host using username and password authentication + ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | -| `tunnel_host` | *str* | :heavy_check_mark: | Hostname of the jump server host that allows inbound ssh tunnel. 
| | -| `tunnel_user` | *str* | :heavy_check_mark: | OS-level username for logging into the jump server host | | -| `tunnel_user_password` | *str* | :heavy_check_mark: | OS-level password for logging into the jump server host | | -| `tunnel_method` | [models.SourceMysqlSchemasTunnelMethodTunnelMethod](../models/sourcemysqlschemastunnelmethodtunnelmethod.md) | :heavy_check_mark: | Connect through a jump server tunnel host using username and password authentication | | -| `tunnel_port` | *Optional[int]* | :heavy_minus_sign: | Port on the proxy/jump server that accepts inbound ssh connections. | 22 | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | +| `tunnel_host` | *str* | :heavy_check_mark: | Hostname of the jump server host that allows inbound ssh tunnel. | +| `tunnel_user` | *str* | :heavy_check_mark: | OS-level username for logging into the jump server host | +| `tunnel_user_password` | *str* | :heavy_check_mark: | OS-level password for logging into the jump server host | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `tunnel_method` | [Optional[models.SourceMysqlSchemasTunnelMethodTunnelMethod]](../models/sourcemysqlschemastunnelmethodtunnelmethod.md) | :heavy_minus_sign: | N/A | +| `tunnel_port` | *Optional[int]* | :heavy_minus_sign: | Port on the proxy/jump server that accepts inbound ssh connections. | \ No newline at end of file diff --git a/docs/models/readchangesusingbinarylogcdc.md b/docs/models/sourcemysqlreadchangesusingchangedatacapturecdc.md similarity index 69% rename from docs/models/readchangesusingbinarylogcdc.md rename to docs/models/sourcemysqlreadchangesusingchangedatacapturecdc.md index f7fe93f4..672e3f2f 100644 --- a/docs/models/readchangesusingbinarylogcdc.md +++ b/docs/models/sourcemysqlreadchangesusingchangedatacapturecdc.md @@ -1,14 +1,14 @@ -# ReadChangesUsingBinaryLogCDC +# SourceMysqlReadChangesUsingChangeDataCaptureCDC -Recommended - Incrementally reads new inserts, updates, and deletes using the MySQL binary log. This must be enabled on your database. +Recommended - Incrementally reads new inserts, updates, and deletes using MySQL's change data capture feature. This must be enabled on your database. 
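A hedged configuration sketch for this update method; the values are illustrative, the field names follow the Fields table below, and the enum member is assumed as noted earlier:

```python
from airbyte_api import models

# Illustrative values: give the initial load up to 8 hours before switching
# to reading CDC logs, and fail fast on a stale cursor position.
cdc_method = models.SourceMysqlReadChangesUsingChangeDataCaptureCDC(
    initial_load_timeout_hours=8,
    invalid_cdc_cursor_position_behavior=models.SourceMysqlInvalidCDCPositionBehaviorAdvanced.FAIL_SYNC,
    server_timezone="UTC",
)
```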
## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `initial_load_timeout_hours` | *Optional[int]* | :heavy_minus_sign: | The amount of time an initial load is allowed to continue for before catching up on CDC logs. | -| `initial_waiting_seconds` | *Optional[int]* | :heavy_minus_sign: | The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time. | -| `invalid_cdc_cursor_position_behavior` | [Optional[models.SourceMysqlInvalidCDCPositionBehaviorAdvanced]](../models/sourcemysqlinvalidcdcpositionbehavioradvanced.md) | :heavy_minus_sign: | Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value into the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss. | -| `method` | [models.SourceMysqlMethod](../models/sourcemysqlmethod.md) | :heavy_check_mark: | N/A | -| `server_time_zone` | *Optional[str]* | :heavy_minus_sign: | Enter the configured MySQL server timezone. This should only be done if the configured timezone in your MySQL instance does not conform to IANNA standard. 
| \ No newline at end of file +| Field | Type | Required | Description | +| --------------- | --------------- | --------------- | --------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `initial_load_timeout_hours` | *Optional[int]* | :heavy_minus_sign: | The amount of time an initial load is allowed to continue for before catching up on CDC logs. | +| `invalid_cdc_cursor_position_behavior` | [Optional[models.SourceMysqlInvalidCDCPositionBehaviorAdvanced]](../models/sourcemysqlinvalidcdcpositionbehavioradvanced.md) | :heavy_minus_sign: | Determines whether Airbyte should fail or re-sync data in case of a stale/invalid cursor value in the mined logs. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss. | +| `method` | [Optional[models.SourceMysqlSchemasMethod]](../models/sourcemysqlschemasmethod.md) | :heavy_minus_sign: | N/A | +| `server_timezone` | *Optional[str]* | :heavy_minus_sign: | Enter the configured MySQL server timezone. This should only be done if the configured timezone in your MySQL instance does not conform to the IANA standard. | \ No newline at end of file diff --git a/docs/models/sourcemysqlscanchangeswithuserdefinedcursor.md b/docs/models/sourcemysqlscanchangeswithuserdefinedcursor.md index 13daa05f..067f33d7 100644 --- a/docs/models/sourcemysqlscanchangeswithuserdefinedcursor.md +++ b/docs/models/sourcemysqlscanchangeswithuserdefinedcursor.md @@ -5,6 +5,7 @@ Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). 
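By contrast, a user-defined cursor might be wired into a MySQL source roughly like this (a sketch: connection values are placeholders, and `source_type` is assumed to be populated as a generated const):

```python
from airbyte_api import models

# Placeholder connection details; incremental reads follow the cursor column
# (e.g. updated_at) selected when the connection is configured.
source_config = models.SourceMysql(
    host="db.example.com",
    port=3306,
    database="sales",
    username="airbyte",
    password="****",
    replication_method=models.SourceMysqlScanChangesWithUserDefinedCursor(),
)
```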
+ + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------- | ---------------------------------------------------------- | ---------------------------------------------------------- | ---------------------------------------------------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `cursor_method` | [Optional[models.CursorMethod]](../models/cursormethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcenetsuiteenterpriseschemasauthenticationmethod.md b/docs/models/sourcenetsuiteenterpriseschemasauthenticationmethod.md new file mode 100644 index 00000000..ee90247a --- /dev/null +++ b/docs/models/sourcenetsuiteenterpriseschemasauthenticationmethod.md @@ -0,0 +1,8 @@ +# SourceNetsuiteEnterpriseSchemasAuthenticationMethod + + +## Values + +| Name | Value | +| ---------------------------- | ---------------------------- | +| `TOKEN_BASED_AUTHENTICATION` | token_based_authentication | \ No newline at end of file diff --git a/docs/models/sourcenetsuiteenterpriseschemasauthenticationmethodauthenticationmethod.md b/docs/models/sourcenetsuiteenterpriseschemasauthenticationmethodauthenticationmethod.md new file mode 100644 index 00000000..3e8ed80f --- /dev/null +++ b/docs/models/sourcenetsuiteenterpriseschemasauthenticationmethodauthenticationmethod.md @@ -0,0 +1,8 @@ +# SourceNetsuiteEnterpriseSchemasAuthenticationMethodAuthenticationMethod + + +## Values + +| Name | Value | +| ----------------------- | ----------------------- | +| `OAUTH2_AUTHENTICATION` | oauth2_authentication | \ No newline at end of file diff --git a/docs/models/sourcenetsuiteenterpriseschemasauthenticationmethodauthenticationmethodauthenticationmethod.md b/docs/models/sourcenetsuiteenterpriseschemasauthenticationmethodauthenticationmethodauthenticationmethod.md new file mode 100644 index 00000000..6b855c36 --- /dev/null +++ b/docs/models/sourcenetsuiteenterpriseschemasauthenticationmethodauthenticationmethodauthenticationmethod.md @@ -0,0 +1,8 @@ +# SourceNetsuiteEnterpriseSchemasAuthenticationMethodAuthenticationMethodAuthenticationMethod + + +## Values + +| Name | Value | +| ------------------------- | ------------------------- | +| `PASSWORD_AUTHENTICATION` | password_authentication | \ No newline at end of file diff --git a/docs/models/sourcenetsuiteenterpriseschemaspasswordauthentication.md b/docs/models/sourcenetsuiteenterpriseschemaspasswordauthentication.md new file mode 100644 index 00000000..36e85892 --- /dev/null +++ b/docs/models/sourcenetsuiteenterpriseschemaspasswordauthentication.md @@ -0,0 +1,15 @@ +# SourceNetsuiteEnterpriseSchemasPasswordAuthentication + +Connect through a jump server tunnel host using username and password authentication + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------ | +| `tunnel_host` | *str* | :heavy_check_mark: | Hostname of the jump server host that allows 
inbound ssh tunnel. | +| `tunnel_user` | *str* | :heavy_check_mark: | OS-level username for logging into the jump server host | +| `tunnel_user_password` | *str* | :heavy_check_mark: | OS-level password for logging into the jump server host | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `tunnel_method` | [Optional[models.SourceNetsuiteEnterpriseSchemasTunnelMethodTunnelMethod]](../models/sourcenetsuiteenterpriseschemastunnelmethodtunnelmethod.md) | :heavy_minus_sign: | N/A | +| `tunnel_port` | *Optional[int]* | :heavy_minus_sign: | Port on the proxy/jump server that accepts inbound ssh connections. | \ No newline at end of file diff --git a/docs/models/sourcenetsuiteenterpriseschemastunnelmethod.md b/docs/models/sourcenetsuiteenterpriseschemastunnelmethod.md new file mode 100644 index 00000000..23de35ce --- /dev/null +++ b/docs/models/sourcenetsuiteenterpriseschemastunnelmethod.md @@ -0,0 +1,8 @@ +# SourceNetsuiteEnterpriseSchemasTunnelMethod + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `SSH_KEY_AUTH` | SSH_KEY_AUTH | \ No newline at end of file diff --git a/docs/models/sourcenetsuiteenterpriseschemastunnelmethodtunnelmethod.md b/docs/models/sourcenetsuiteenterpriseschemastunnelmethodtunnelmethod.md new file mode 100644 index 00000000..330310a9 --- /dev/null +++ b/docs/models/sourcenetsuiteenterpriseschemastunnelmethodtunnelmethod.md @@ -0,0 +1,8 @@ +# SourceNetsuiteEnterpriseSchemasTunnelMethodTunnelMethod + + +## Values + +| Name | Value | +| ------------------- | ------------------- | +| `SSH_PASSWORD_AUTH` | SSH_PASSWORD_AUTH | \ No newline at end of file diff --git a/docs/models/sourcenetsuiteenterprisesshkeyauthentication.md b/docs/models/sourcenetsuiteenterprisesshkeyauthentication.md new file mode 100644 index 00000000..4440f764 --- /dev/null +++ b/docs/models/sourcenetsuiteenterprisesshkeyauthentication.md @@ -0,0 +1,15 @@ +# SourceNetsuiteEnterpriseSSHKeyAuthentication + +Connect through a jump server tunnel host using username and ssh key + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------ | +| `ssh_key` | *str* | :heavy_check_mark: | OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa ) | +| `tunnel_host` | *str* | :heavy_check_mark: | Hostname of the jump server host that allows inbound ssh tunnel. | +| `tunnel_user` | *str* | :heavy_check_mark: | OS-level username for logging into the jump server host | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `tunnel_method` | [Optional[models.SourceNetsuiteEnterpriseSchemasTunnelMethod]](../models/sourcenetsuiteenterpriseschemastunnelmethod.md) | :heavy_minus_sign: | N/A | +| `tunnel_port` | *Optional[int]* | :heavy_minus_sign: | Port on the proxy/jump server that accepts inbound ssh connections. 
| \ No newline at end of file diff --git a/docs/models/sourcenetsuiteenterprisesshtunnelmethod.md b/docs/models/sourcenetsuiteenterprisesshtunnelmethod.md new file mode 100644 index 00000000..e82d260a --- /dev/null +++ b/docs/models/sourcenetsuiteenterprisesshtunnelmethod.md @@ -0,0 +1,25 @@ +# SourceNetsuiteEnterpriseSSHTunnelMethod + +Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. + + +## Supported Types + +### `models.SourceNetsuiteEnterpriseNoTunnel` + +```python +value: models.SourceNetsuiteEnterpriseNoTunnel = /* values here */ +``` + +### `models.SourceNetsuiteEnterpriseSSHKeyAuthentication` + +```python +value: models.SourceNetsuiteEnterpriseSSHKeyAuthentication = /* values here */ +``` + +### `models.SourceNetsuiteEnterpriseSchemasPasswordAuthentication` + +```python +value: models.SourceNetsuiteEnterpriseSchemasPasswordAuthentication = /* values here */ +``` + diff --git a/docs/models/sourcenetsuiteenterprisetunnelmethod.md b/docs/models/sourcenetsuiteenterprisetunnelmethod.md new file mode 100644 index 00000000..efc0bf30 --- /dev/null +++ b/docs/models/sourcenetsuiteenterprisetunnelmethod.md @@ -0,0 +1,8 @@ +# SourceNetsuiteEnterpriseTunnelMethod + + +## Values + +| Name | Value | +| ----------- | ----------- | +| `NO_TUNNEL` | NO_TUNNEL | \ No newline at end of file diff --git a/docs/models/sourcenetsuiteenterpriseupdatemethod.md b/docs/models/sourcenetsuiteenterpriseupdatemethod.md new file mode 100644 index 00000000..6849aa80 --- /dev/null +++ b/docs/models/sourcenetsuiteenterpriseupdatemethod.md @@ -0,0 +1,13 @@ +# SourceNetsuiteEnterpriseUpdateMethod + +Configures how data is extracted from the database. + + +## Supported Types + +### `models.SourceNetsuiteEnterpriseScanChangesWithUserDefinedCursor` + +```python +value: models.SourceNetsuiteEnterpriseScanChangesWithUserDefinedCursor = /* values here */ +``` + diff --git a/docs/models/sourcenewsdata.md b/docs/models/sourcenewsdata.md new file mode 100644 index 00000000..8437cd04 --- /dev/null +++ b/docs/models/sourcenewsdata.md @@ -0,0 +1,14 @@ +# SourceNewsdata + + +## Fields + +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | API Key | +| `one_of` | *Optional[Any]* | :heavy_minus_sign: | N/A | +| `category` | List[[models.SourceNewsdataCategory](../models/sourcenewsdatacategory.md)] | :heavy_minus_sign: | Categories (maximum 5) to restrict the search to. | +| `country` | List[[models.SourceNewsdataCountry](../models/sourcenewsdatacountry.md)] | :heavy_minus_sign: | 2-letter ISO 3166-1 countries (maximum 5) to restrict the search to. | +| `domain` | List[*str*] | :heavy_minus_sign: | Domains (maximum 5) to restrict the search to. Use the sources stream to find top sources id. | +| `language` | List[[models.SourceNewsdataLanguage](../models/sourcenewsdatalanguage.md)] | :heavy_minus_sign: | Languages (maximum 5) to restrict the search to. 
| +| `source_type` | [models.Newsdata](../models/newsdata.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcenewsdatacategory.md b/docs/models/sourcenewsdatacategory.md new file mode 100644 index 00000000..d914dc84 --- /dev/null +++ b/docs/models/sourcenewsdatacategory.md @@ -0,0 +1,18 @@ +# SourceNewsdataCategory + + +## Values + +| Name | Value | +| --------------- | --------------- | +| `BUSINESS` | business | +| `ENTERTAINMENT` | entertainment | +| `ENVIRONMENT` | environment | +| `FOOD` | food | +| `HEALTH` | health | +| `POLITICS` | politics | +| `SCIENCE` | science | +| `SPORTS` | sports | +| `TECHNOLOGY` | technology | +| `TOP` | top | +| `WORLD` | world | \ No newline at end of file diff --git a/docs/models/sourcenewsdatacountry.md b/docs/models/sourcenewsdatacountry.md new file mode 100644 index 00000000..9822d56d --- /dev/null +++ b/docs/models/sourcenewsdatacountry.md @@ -0,0 +1,83 @@ +# SourceNewsdataCountry + + +## Values + +| Name | Value | +| ----- | ----- | +| `AR` | ar | +| `AU` | au | +| `AT` | at | +| `BD` | bd | +| `BY` | by | +| `BE` | be | +| `BR` | br | +| `BG` | bg | +| `CA` | ca | +| `CL` | cl | +| `CN` | cn | +| `CO` | co | +| `CR` | cr | +| `CU` | cu | +| `CZ` | cz | +| `DK` | dk | +| `DO` | do | +| `EC` | ec | +| `EG` | eg | +| `EE` | ee | +| `ET` | et | +| `FI` | fi | +| `FR` | fr | +| `DE` | de | +| `GR` | gr | +| `HK` | hk | +| `HU` | hu | +| `IN` | in | +| `ID` | id | +| `IQ` | iq | +| `IE` | ie | +| `IL` | il | +| `IT` | it | +| `JP` | jp | +| `KZ` | kz | +| `KW` | kw | +| `LV` | lv | +| `LB` | lb | +| `LT` | lt | +| `MY` | my | +| `MX` | mx | +| `MA` | ma | +| `MM` | mm | +| `NL` | nl | +| `NZ` | nz | +| `NG` | ng | +| `KP` | kp | +| `NO` | no | +| `PK` | pk | +| `PE` | pe | +| `PH` | ph | +| `PL` | pl | +| `PT` | pt | +| `PR` | pr | +| `RO` | ro | +| `RU` | ru | +| `SA` | sa | +| `RS` | rs | +| `SG` | sg | +| `SK` | sk | +| `SI` | si | +| `ZA` | za | +| `KR` | kr | +| `ES` | es | +| `SE` | se | +| `CH` | ch | +| `TW` | tw | +| `TZ` | tz | +| `TH` | th | +| `TR` | tr | +| `UA` | ua | +| `AE` | ae | +| `GB` | gb | +| `US` | us | +| `VE` | ve | +| `VI` | vi | \ No newline at end of file diff --git a/docs/models/sourcenewsdatalanguage.md b/docs/models/sourcenewsdatalanguage.md new file mode 100644 index 00000000..2ac9dfbe --- /dev/null +++ b/docs/models/sourcenewsdatalanguage.md @@ -0,0 +1,52 @@ +# SourceNewsdataLanguage + + +## Values + +| Name | Value | +| ----- | ----- | +| `BE` | be | +| `AM` | am | +| `AR` | ar | +| `BN` | bn | +| `BS` | bs | +| `BG` | bg | +| `MY` | my | +| `CKB` | ckb | +| `ZH` | zh | +| `HR` | hr | +| `CS` | cs | +| `DA` | da | +| `NL` | nl | +| `EN` | en | +| `ET` | et | +| `FI` | fi | +| `FR` | fr | +| `DE` | de | +| `EL` | el | +| `HE` | he | +| `HI` | hi | +| `HU` | hu | +| `IN` | in | +| `IT` | it | +| `JP` | jp | +| `KO` | ko | +| `LV` | lv | +| `LT` | lt | +| `MS` | ms | +| `NO` | no | +| `PL` | pl | +| `PT` | pt | +| `RO` | ro | +| `RU` | ru | +| `SR` | sr | +| `SK` | sk | +| `SL` | sl | +| `ES` | es | +| `SW` | sw | +| `SV` | sv | +| `TH` | th | +| `TR` | tr | +| `UK` | uk | +| `UR` | ur | +| `VI` | vi | \ No newline at end of file diff --git a/docs/models/sourcenexiopay.md b/docs/models/sourcenexiopay.md new file mode 100644 index 00000000..852916b4 --- /dev/null +++ b/docs/models/sourcenexiopay.md @@ -0,0 +1,12 @@ +# SourceNexiopay + + +## Fields + +| Field | Type | Required | Description | +| 
--------------- | --------------- | --------------- | --------------- | +| `api_key` | *str* | :heavy_check_mark: | Your Nexio API key (password). You can find it in the Nexio Dashboard under Settings > User Management. Select the API user and copy the API key. | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `username` | *str* | :heavy_check_mark: | Your Nexio API username. You can find it in the Nexio Dashboard under Settings > User Management. Select the API user and copy the username. | +| `source_type` | [models.Nexiopay](../models/nexiopay.md) | :heavy_check_mark: | N/A | +| `subdomain` | [Optional[models.Subdomain]](../models/subdomain.md) | :heavy_minus_sign: | The subdomain for the Nexio API environment, such as 'nexiopaysandbox' or 'nexiopay'. | \ No newline at end of file diff --git a/docs/models/sourceninjaonermm.md b/docs/models/sourceninjaonermm.md new file mode 100644 index 00000000..a1d8e081 --- /dev/null +++ b/docs/models/sourceninjaonermm.md @@ -0,0 +1,10 @@ +# SourceNinjaoneRmm + + +## Fields + +| Field | Type | Required | Description | +| --------------- | --------------- | --------------- | --------------- | +| `api_key` | *str* | :heavy_check_mark: | The token can be generated from the authorize section of the NinjaOne Swagger documentation: `https://app.ninjarmm.com/apidocs/?links.active=authorization` | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.NinjaoneRmm](../models/ninjaonermm.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceopenexchangerates.md b/docs/models/sourceopenexchangerates.md new file mode 100644 index 00000000..199af2a9 --- /dev/null +++ b/docs/models/sourceopenexchangerates.md @@ -0,0 +1,11 @@ +# SourceOpenExchangeRates + + +## Fields + +| Field | Type | Required | Description | Example | +| --------------- | --------------- | --------------- | --------------- | --------------- | +| `app_id` | *str* | :heavy_check_mark: | App ID provided by Open Exchange Rates | 
| +| `start_date` | *str* | :heavy_check_mark: | Start getting data from that date. | YYYY-MM-DD | +| `base` | *Optional[str]* | :heavy_minus_sign: | Change base currency (3-letter code, default is USD - only modifiable in paid plans) | EUR | +| `source_type` | [models.OpenExchangeRates](../models/openexchangerates.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourceopuswatch.md b/docs/models/sourceopuswatch.md new file mode 100644 index 00000000..2b0e6c6a --- /dev/null +++ b/docs/models/sourceopuswatch.md @@ -0,0 +1,10 @@ +# SourceOpuswatch + + +## Fields + +| Field | Type | Required | Description | +| --------------- | --------------- | --------------- | --------------- | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `source_type` | [models.Opuswatch](../models/opuswatch.md) | :heavy_check_mark: | N/A | +| `start_date` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourceoracleenterprise.md b/docs/models/sourceoracleenterprise.md new file mode 100644 index 00000000..3a289cf2 --- /dev/null +++ b/docs/models/sourceoracleenterprise.md @@ -0,0 +1,21 @@ +# SourceOracleEnterprise + + +## Fields + +| Field | Type | Required | Description | +| --------------- | --------------- | --------------- | --------------- | +| `connection_data` | [models.SourceOracleEnterpriseConnectBy](../models/sourceoracleenterpriseconnectby.md) | :heavy_check_mark: | The scheme by which to establish a database connection. | +| `cursor` | [models.SourceOracleEnterpriseUpdateMethod](../models/sourceoracleenterpriseupdatemethod.md) | :heavy_check_mark: | Configures how data is extracted from the database. | +| `encryption` | [models.SourceOracleEnterpriseEncryption](../models/sourceoracleenterpriseencryption.md) | :heavy_check_mark: | The encryption method which is used when communicating with the database. | +| `host` | *str* | :heavy_check_mark: | Hostname of the database. 
| +| `tunnel_method` | [models.SourceOracleEnterpriseSSHTunnelMethod](../models/sourceoracleenterprisesshtunnelmethod.md) | :heavy_check_mark: | Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. | +| `username` | *str* | :heavy_check_mark: | The username which is used to access the database. | +| `check_privileges` | *Optional[bool]* | :heavy_minus_sign: | When this feature is enabled, during schema discovery the connector will query each table or view individually to check access privileges and inaccessible tables, views, or columns therein will be removed. In large schemas, this might cause schema discovery to take too long, in which case it might be advisable to disable this feature. | +| `checkpoint_target_interval_seconds` | *Optional[int]* | :heavy_minus_sign: | How often (in seconds) a stream should checkpoint, when possible. | +| `concurrency` | *Optional[int]* | :heavy_minus_sign: | Maximum number of concurrent queries to the database. | +| `jdbc_url_params` | *Optional[str]* | :heavy_minus_sign: | Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). | +| `password` | *Optional[str]* | :heavy_minus_sign: | The password associated with the username. | +| `port` | *Optional[int]* | :heavy_minus_sign: | Port of the database.
Oracle Corporation recommends the following port numbers:
1521 - Default listening port for client connections to the listener.
2484 - Recommended and officially registered listening port for client connections to the listener using TCP/IP with SSL. | +| `schemas` | List[*str*] | :heavy_minus_sign: | The list of schemas to sync from. Defaults to user. Case sensitive. | +| `source_type` | [models.OracleEnterprise](../models/oracleenterprise.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceoracleenterpriseconnectby.md b/docs/models/sourceoracleenterpriseconnectby.md new file mode 100644 index 00000000..3327fcd4 --- /dev/null +++ b/docs/models/sourceoracleenterpriseconnectby.md @@ -0,0 +1,19 @@ +# SourceOracleEnterpriseConnectBy + +The scheme by which to establish a database connection. + + +## Supported Types + +### `models.SourceOracleEnterpriseServiceName` + +```python +value: models.SourceOracleEnterpriseServiceName = /* values here */ +``` + +### `models.SourceOracleEnterpriseSystemIDSID` + +```python +value: models.SourceOracleEnterpriseSystemIDSID = /* values here */ +``` + diff --git a/docs/models/sourceoracleenterpriseconnectiontype.md b/docs/models/sourceoracleenterpriseconnectiontype.md new file mode 100644 index 00000000..28f92d6c --- /dev/null +++ b/docs/models/sourceoracleenterpriseconnectiontype.md @@ -0,0 +1,8 @@ +# SourceOracleEnterpriseConnectionType + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `SERVICE_NAME` | service_name | \ No newline at end of file diff --git a/docs/models/sourceoracleenterprisecursormethod.md b/docs/models/sourceoracleenterprisecursormethod.md new file mode 100644 index 00000000..9bc0790f --- /dev/null +++ b/docs/models/sourceoracleenterprisecursormethod.md @@ -0,0 +1,8 @@ +# SourceOracleEnterpriseCursorMethod + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `USER_DEFINED` | user_defined | \ No newline at end of file diff --git a/docs/models/sourceoracleenterpriseencryption.md b/docs/models/sourceoracleenterpriseencryption.md new file mode 100644 index 00000000..8ecc5186 --- /dev/null +++ b/docs/models/sourceoracleenterpriseencryption.md @@ -0,0 +1,25 @@ +# SourceOracleEnterpriseEncryption + +The encryption method which is used when communicating with the database. + + +## Supported Types + +### `models.SourceOracleEnterpriseUnencrypted` + +```python +value: models.SourceOracleEnterpriseUnencrypted = /* values here */ +``` + +### `models.SourceOracleEnterpriseNativeNetworkEncryptionNNE` + +```python +value: models.SourceOracleEnterpriseNativeNetworkEncryptionNNE = /* values here */ +``` + +### `models.SourceOracleEnterpriseTLSEncryptedVerifyCertificate` + +```python +value: models.SourceOracleEnterpriseTLSEncryptedVerifyCertificate = /* values here */ +``` + diff --git a/docs/models/sourceoracleenterpriseencryptionalgorithm.md b/docs/models/sourceoracleenterpriseencryptionalgorithm.md new file mode 100644 index 00000000..0b42e69f --- /dev/null +++ b/docs/models/sourceoracleenterpriseencryptionalgorithm.md @@ -0,0 +1,15 @@ +# SourceOracleEnterpriseEncryptionAlgorithm + +This parameter defines what encryption algorithm is used. 
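A hedged sketch pairing an algorithm with the NNE option documented just below (enum members per the Values table that follows):

```python
from airbyte_api import models

# Native network encryption negotiated by the Oracle client, here with AES256.
nne = models.SourceOracleEnterpriseNativeNetworkEncryptionNNE(
    encryption_algorithm=models.SourceOracleEnterpriseEncryptionAlgorithm.AES256,
)
```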
+ + +## Values + +| Name | Value | +| -------------- | -------------- | +| `AES256` | AES256 | +| `AES192` | AES192 | +| `AES128` | AES128 | +| `THREE_DES168` | 3DES168 | +| `THREE_DES112` | 3DES112 | +| `DES` | DES | \ No newline at end of file diff --git a/docs/models/sourceoracleenterpriseencryptionmethod.md b/docs/models/sourceoracleenterpriseencryptionmethod.md new file mode 100644 index 00000000..01b6584c --- /dev/null +++ b/docs/models/sourceoracleenterpriseencryptionmethod.md @@ -0,0 +1,8 @@ +# SourceOracleEnterpriseEncryptionMethod + + +## Values + +| Name | Value | +| ------------- | ------------- | +| `UNENCRYPTED` | unencrypted | \ No newline at end of file diff --git a/docs/models/sourceoracleenterpriseinvalidcdcpositionbehavioradvanced.md b/docs/models/sourceoracleenterpriseinvalidcdcpositionbehavioradvanced.md new file mode 100644 index 00000000..621794ca --- /dev/null +++ b/docs/models/sourceoracleenterpriseinvalidcdcpositionbehavioradvanced.md @@ -0,0 +1,11 @@ +# SourceOracleEnterpriseInvalidCDCPositionBehaviorAdvanced + +Determines whether Airbyte should fail or re-sync data in case of a stale/invalid cursor value in the mined logs. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss. + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `FAIL_SYNC` | Fail sync | +| `RE_SYNC_DATA` | Re-sync data | \ No newline at end of file diff --git a/docs/models/sourceoracleenterprisenativenetworkencryptionnne.md b/docs/models/sourceoracleenterprisenativenetworkencryptionnne.md new file mode 100644 index 00000000..ca57e740 --- /dev/null +++ b/docs/models/sourceoracleenterprisenativenetworkencryptionnne.md @@ -0,0 +1,12 @@ +# SourceOracleEnterpriseNativeNetworkEncryptionNNE + +The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports. + + +## Fields + +| Field | Type | Required | Description | +| --------------- | --------------- | --------------- | --------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `encryption_algorithm` | [Optional[models.SourceOracleEnterpriseEncryptionAlgorithm]](../models/sourceoracleenterpriseencryptionalgorithm.md) | :heavy_minus_sign: | This parameter defines what encryption algorithm is used. 
| +| `encryption_method` | [Optional[models.SourceOracleEnterpriseSchemasEncryptionMethod]](../models/sourceoracleenterpriseschemasencryptionmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourceoracleenterprisenotunnel.md b/docs/models/sourceoracleenterprisenotunnel.md new file mode 100644 index 00000000..4d8c5010 --- /dev/null +++ b/docs/models/sourceoracleenterprisenotunnel.md @@ -0,0 +1,11 @@ +# SourceOracleEnterpriseNoTunnel + +No ssh tunnel needed to connect to database + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `tunnel_method` | [Optional[models.SourceOracleEnterpriseTunnelMethod]](../models/sourceoracleenterprisetunnelmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourceoracleenterprisepasswordauthentication.md b/docs/models/sourceoracleenterprisepasswordauthentication.md new file mode 100644 index 00000000..873cc1f4 --- /dev/null +++ b/docs/models/sourceoracleenterprisepasswordauthentication.md @@ -0,0 +1,15 @@ +# SourceOracleEnterprisePasswordAuthentication + +Connect through a jump server tunnel host using username and password authentication + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------- | +| `tunnel_host` | *str* | :heavy_check_mark: | Hostname of the jump server host that allows inbound ssh tunnel. | +| `tunnel_user` | *str* | :heavy_check_mark: | OS-level username for logging into the jump server host | +| `tunnel_user_password` | *str* | :heavy_check_mark: | OS-level password for logging into the jump server host | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `tunnel_method` | [Optional[models.SourceOracleEnterpriseSchemasTunnelMethodTunnelMethod]](../models/sourceoracleenterpriseschemastunnelmethodtunnelmethod.md) | :heavy_minus_sign: | N/A | +| `tunnel_port` | *Optional[int]* | :heavy_minus_sign: | Port on the proxy/jump server that accepts inbound ssh connections. 
| \ No newline at end of file diff --git a/docs/models/sourceoracleenterprisereadchangesusingchangedatacapturecdc.md b/docs/models/sourceoracleenterprisereadchangesusingchangedatacapturecdc.md new file mode 100644 index 00000000..2b04fa95 --- /dev/null +++ b/docs/models/sourceoracleenterprisereadchangesusingchangedatacapturecdc.md @@ -0,0 +1,14 @@ +# SourceOracleEnterpriseReadChangesUsingChangeDataCaptureCDC + +Recommended - Incrementally reads new inserts, updates, and deletes using Oracle's change data capture feature. This must be enabled on your database. + + +## Fields + +| Field | Type | Required | Description | +| --------------- | --------------- | --------------- | --------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `cursor_method` | [Optional[models.SourceOracleEnterpriseSchemasCursorMethod]](../models/sourceoracleenterpriseschemascursormethod.md) | :heavy_minus_sign: | N/A | +| `debezium_shutdown_timeout_seconds` | *Optional[int]* | :heavy_minus_sign: | The amount of time to allow the Debezium Engine to shut down, in seconds. | +| `initial_load_timeout_hours` | *Optional[int]* | :heavy_minus_sign: | The amount of time an initial load is allowed to continue for before catching up on CDC events. | +| `invalid_cdc_cursor_position_behavior` | [Optional[models.SourceOracleEnterpriseInvalidCDCPositionBehaviorAdvanced]](../models/sourceoracleenterpriseinvalidcdcpositionbehavioradvanced.md) | :heavy_minus_sign: | Determines whether Airbyte should fail or re-sync data in case of a stale/invalid cursor value in the mined logs. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss. 
| \ No newline at end of file diff --git a/docs/models/sourceoracleenterprisescanchangeswithuserdefinedcursor.md b/docs/models/sourceoracleenterprisescanchangeswithuserdefinedcursor.md new file mode 100644 index 00000000..6d8df228 --- /dev/null +++ b/docs/models/sourceoracleenterprisescanchangeswithuserdefinedcursor.md @@ -0,0 +1,11 @@ +# SourceOracleEnterpriseScanChangesWithUserDefinedCursor + +Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `cursor_method` | [Optional[models.SourceOracleEnterpriseCursorMethod]](../models/sourceoracleenterprisecursormethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourceoracleenterpriseschemasconnectiontype.md b/docs/models/sourceoracleenterpriseschemasconnectiontype.md new file mode 100644 index 00000000..24b63e9e --- /dev/null +++ b/docs/models/sourceoracleenterpriseschemasconnectiontype.md @@ -0,0 +1,8 @@ +# SourceOracleEnterpriseSchemasConnectionType + + +## Values + +| Name | Value | +| ----- | ----- | +| `SID` | sid | \ No newline at end of file diff --git a/docs/models/sourceoracleenterpriseschemascursormethod.md b/docs/models/sourceoracleenterpriseschemascursormethod.md new file mode 100644 index 00000000..42ec3337 --- /dev/null +++ b/docs/models/sourceoracleenterpriseschemascursormethod.md @@ -0,0 +1,8 @@ +# SourceOracleEnterpriseSchemasCursorMethod + + +## Values + +| Name | Value | +| ----- | ----- | +| `CDC` | cdc | \ No newline at end of file diff --git a/docs/models/sourceoracleenterpriseschemasencryptionencryptionmethod.md b/docs/models/sourceoracleenterpriseschemasencryptionencryptionmethod.md new file mode 100644 index 00000000..51aa6cf1 --- /dev/null +++ b/docs/models/sourceoracleenterpriseschemasencryptionencryptionmethod.md @@ -0,0 +1,8 @@ +# SourceOracleEnterpriseSchemasEncryptionEncryptionMethod + + +## Values + +| Name | Value | +| ------------------------------ | ------------------------------ | +| `ENCRYPTED_VERIFY_CERTIFICATE` | encrypted_verify_certificate | \ No newline at end of file diff --git a/docs/models/sourceoracleenterpriseschemasencryptionmethod.md b/docs/models/sourceoracleenterpriseschemasencryptionmethod.md new file mode 100644 index 00000000..232666d5 --- /dev/null +++ b/docs/models/sourceoracleenterpriseschemasencryptionmethod.md @@ -0,0 +1,8 @@ +# SourceOracleEnterpriseSchemasEncryptionMethod + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `CLIENT_NNE` | client_nne | \ No newline at end of file diff --git a/docs/models/sourceoracleenterpriseschemastunnelmethod.md b/docs/models/sourceoracleenterpriseschemastunnelmethod.md new file mode 100644 index 00000000..5e216577 --- /dev/null +++ b/docs/models/sourceoracleenterpriseschemastunnelmethod.md @@ -0,0 +1,8 @@ +# SourceOracleEnterpriseSchemasTunnelMethod + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `SSH_KEY_AUTH` | SSH_KEY_AUTH | \ 
No newline at end of file diff --git a/docs/models/sourceoracleenterpriseschemastunnelmethodtunnelmethod.md b/docs/models/sourceoracleenterpriseschemastunnelmethodtunnelmethod.md new file mode 100644 index 00000000..1b3cb1d7 --- /dev/null +++ b/docs/models/sourceoracleenterpriseschemastunnelmethodtunnelmethod.md @@ -0,0 +1,8 @@ +# SourceOracleEnterpriseSchemasTunnelMethodTunnelMethod + + +## Values + +| Name | Value | +| ------------------- | ------------------- | +| `SSH_PASSWORD_AUTH` | SSH_PASSWORD_AUTH | \ No newline at end of file diff --git a/docs/models/sourceoracleenterpriseservicename.md b/docs/models/sourceoracleenterpriseservicename.md new file mode 100644 index 00000000..306d84e2 --- /dev/null +++ b/docs/models/sourceoracleenterpriseservicename.md @@ -0,0 +1,12 @@ +# SourceOracleEnterpriseServiceName + +Use service name. + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | +| `service_name` | *str* | :heavy_check_mark: | N/A | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `connection_type` | [Optional[models.SourceOracleEnterpriseConnectionType]](../models/sourceoracleenterpriseconnectiontype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourceoracleenterprisesshkeyauthentication.md b/docs/models/sourceoracleenterprisesshkeyauthentication.md new file mode 100644 index 00000000..db56eb08 --- /dev/null +++ b/docs/models/sourceoracleenterprisesshkeyauthentication.md @@ -0,0 +1,15 @@ +# SourceOracleEnterpriseSSHKeyAuthentication + +Connect through a jump server tunnel host using username and ssh key + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | +| `ssh_key` | *str* | :heavy_check_mark: | OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa ) | +| `tunnel_host` | *str* | :heavy_check_mark: | Hostname of the jump server host that allows inbound ssh tunnel. | +| `tunnel_user` | *str* | :heavy_check_mark: | OS-level username for logging into the jump server host | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `tunnel_method` | [Optional[models.SourceOracleEnterpriseSchemasTunnelMethod]](../models/sourceoracleenterpriseschemastunnelmethod.md) | :heavy_minus_sign: | N/A | +| `tunnel_port` | *Optional[int]* | :heavy_minus_sign: | Port on the proxy/jump server that accepts inbound ssh connections. 
| \ No newline at end of file diff --git a/docs/models/sourceoracleenterprisesshtunnelmethod.md b/docs/models/sourceoracleenterprisesshtunnelmethod.md new file mode 100644 index 00000000..26e7151e --- /dev/null +++ b/docs/models/sourceoracleenterprisesshtunnelmethod.md @@ -0,0 +1,25 @@ +# SourceOracleEnterpriseSSHTunnelMethod + +Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. + + +## Supported Types + +### `models.SourceOracleEnterpriseNoTunnel` + +```python +value: models.SourceOracleEnterpriseNoTunnel = /* values here */ +``` + +### `models.SourceOracleEnterpriseSSHKeyAuthentication` + +```python +value: models.SourceOracleEnterpriseSSHKeyAuthentication = /* values here */ +``` + +### `models.SourceOracleEnterprisePasswordAuthentication` + +```python +value: models.SourceOracleEnterprisePasswordAuthentication = /* values here */ +``` + diff --git a/docs/models/sourceoracleenterprisesystemidsid.md b/docs/models/sourceoracleenterprisesystemidsid.md new file mode 100644 index 00000000..1943cb2d --- /dev/null +++ b/docs/models/sourceoracleenterprisesystemidsid.md @@ -0,0 +1,12 @@ +# SourceOracleEnterpriseSystemIDSID + +Use Oracle System Identifier. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------ | +| `sid` | *str* | :heavy_check_mark: | N/A | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `connection_type` | [Optional[models.SourceOracleEnterpriseSchemasConnectionType]](../models/sourceoracleenterpriseschemasconnectiontype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinations3gluejsonlinesnewlinedelimitedjson.md b/docs/models/sourceoracleenterprisetlsencryptedverifycertificate.md similarity index 60% rename from docs/models/destinations3gluejsonlinesnewlinedelimitedjson.md rename to docs/models/sourceoracleenterprisetlsencryptedverifycertificate.md index a16fc59c..a3f0ed89 100644 --- a/docs/models/destinations3gluejsonlinesnewlinedelimitedjson.md +++ b/docs/models/sourceoracleenterprisetlsencryptedverifycertificate.md @@ -1,10 +1,12 @@ -# DestinationS3GlueJSONLinesNewlineDelimitedJSON +# SourceOracleEnterpriseTLSEncryptedVerifyCertificate + +Verify and use the certificate provided by the server. 
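A minimal sketch of constructing this model with the generated Python SDK may help orient readers. It assumes the SDK's usual `from airbyte_api import models` import path, and the PEM string is a placeholder rather than a real certificate:

```python
from airbyte_api import models

# Placeholder PEM content; in practice, load the server's certificate from disk.
encryption = models.SourceOracleEnterpriseTLSEncryptedVerifyCertificate(
    ssl_certificate='-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----',
    encryption_method=models.SourceOracleEnterpriseSchemasEncryptionEncryptionMethod.ENCRYPTED_VERIFY_CERTIFICATE,
)
```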
## Fields | Field | Type | Required | Description | | ------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------ | -| `compression` | [Optional[models.DestinationS3GlueCompression]](../models/destinations3gluecompression.md) | :heavy_minus_sign: | Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). | -| `flattening` | [Optional[models.Flattening]](../models/flattening.md) | :heavy_minus_sign: | Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details. | -| `format_type` | [Optional[models.DestinationS3GlueFormatType]](../models/destinations3glueformattype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| `ssl_certificate` | *str* | :heavy_check_mark: | Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations. | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `encryption_method` | [Optional[models.SourceOracleEnterpriseSchemasEncryptionEncryptionMethod]](../models/sourceoracleenterpriseschemasencryptionencryptionmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourceoracleenterprisetunnelmethod.md b/docs/models/sourceoracleenterprisetunnelmethod.md new file mode 100644 index 00000000..443ca1c0 --- /dev/null +++ b/docs/models/sourceoracleenterprisetunnelmethod.md @@ -0,0 +1,8 @@ +# SourceOracleEnterpriseTunnelMethod + + +## Values + +| Name | Value | +| ----------- | ----------- | +| `NO_TUNNEL` | NO_TUNNEL | \ No newline at end of file diff --git a/docs/models/sourceoracleenterpriseunencrypted.md b/docs/models/sourceoracleenterpriseunencrypted.md new file mode 100644 index 00000000..331e81f3 --- /dev/null +++ b/docs/models/sourceoracleenterpriseunencrypted.md @@ -0,0 +1,11 @@ +# SourceOracleEnterpriseUnencrypted + +Data transfer will not be encrypted. 
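By contrast, selecting this variant needs no certificate material. A sketch under the same import assumption; since `encryption_method` is optional, the generated constructor is expected to fill in the discriminator by default:

```python
from airbyte_api import models

# Opt out of transport encryption; suitable only for trusted networks.
# encryption_method is optional and should default to this variant's discriminator.
encryption = models.SourceOracleEnterpriseUnencrypted()
```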
+ + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `encryption_method` | [Optional[models.SourceOracleEnterpriseEncryptionMethod]](../models/sourceoracleenterpriseencryptionmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourceoracleenterpriseupdatemethod.md b/docs/models/sourceoracleenterpriseupdatemethod.md new file mode 100644 index 00000000..d9fe29fc --- /dev/null +++ b/docs/models/sourceoracleenterpriseupdatemethod.md @@ -0,0 +1,19 @@ +# SourceOracleEnterpriseUpdateMethod + +Configures how data is extracted from the database. + + +## Supported Types + +### `models.SourceOracleEnterpriseScanChangesWithUserDefinedCursor` + +```python +value: models.SourceOracleEnterpriseScanChangesWithUserDefinedCursor = /* values here */ +``` + +### `models.SourceOracleEnterpriseReadChangesUsingChangeDataCaptureCDC` + +```python +value: models.SourceOracleEnterpriseReadChangesUsingChangeDataCaptureCDC = /* values here */ +``` + diff --git a/docs/models/sourceoutbrainamplify.md b/docs/models/sourceoutbrainamplify.md index 519af9f3..57ec784d 100644 --- a/docs/models/sourceoutbrainamplify.md +++ b/docs/models/sourceoutbrainamplify.md @@ -7,6 +7,7 @@ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `credentials` | [models.SourceOutbrainAmplifyAuthenticationMethod](../models/sourceoutbrainamplifyauthenticationmethod.md) | :heavy_check_mark: | Credentials for making authenticated requests require either username/password or access_token. | | `start_date` | *str* | :heavy_check_mark: | Date in the format YYYY-MM-DD, e.g. 2017-01-25. Any data before this date will not be replicated. | +| `conversion_count` | [Optional[models.DefinitionOfConversionCountInReports]](../models/definitionofconversioncountinreports.md) | :heavy_minus_sign: | The definition of conversion count in reports. See the docs. | | `end_date` | *Optional[str]* | :heavy_minus_sign: | Date in the format YYYY-MM-DD.
| | `geo_location_breakdown` | [Optional[models.GranularityForGeoLocationRegion]](../models/granularityforgeolocationregion.md) | :heavy_minus_sign: | The granularity used for geo location data in reports. | | `report_granularity` | [Optional[models.GranularityForPeriodicReports]](../models/granularityforperiodicreports.md) | :heavy_minus_sign: | The granularity used for periodic data in reports. See the docs. | diff --git a/docs/models/sourceoutreach.md b/docs/models/sourceoutreach.md index 97a20934..f384f4b5 100644 --- a/docs/models/sourceoutreach.md +++ b/docs/models/sourceoutreach.md @@ -9,5 +9,5 @@ | `client_secret` | *str* | :heavy_check_mark: | The Client Secret of your Outreach developer application. | | | `redirect_uri` | *str* | :heavy_check_mark: | A Redirect URI is the location where the authorization server sends the user once the app has been successfully authorized and granted an authorization code or access token. | | | `refresh_token` | *str* | :heavy_check_mark: | The token for obtaining the new access token. | | -| `start_date` | *str* | :heavy_check_mark: | The date from which you'd like to replicate data for Outreach API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. | 2020-11-16T00:00:00Z | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | The date from which you'd like to replicate data for Outreach API, in the format YYYY-MM-DDT00:00:00.000Z. All data generated after this date will be replicated. | 2020-11-16T00:00:00.000Z | | `source_type` | [models.Outreach](../models/outreach.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourcepaddle.md b/docs/models/sourcepaddle.md new file mode 100644 index 00000000..f6726958 --- /dev/null +++ b/docs/models/sourcepaddle.md @@ -0,0 +1,11 @@ +# SourcePaddle + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Your Paddle API key. You can generate it by navigating to Paddle > Developer tools > Authentication > Generate API key. Treat this key like a password and keep it secure. | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `environment` | [Optional[models.SourcePaddleEnvironment]](../models/sourcepaddleenvironment.md) | :heavy_minus_sign: | The environment for the Paddle API, either 'sandbox' or 'live'. 
| +| `source_type` | [models.Paddle](../models/paddle.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcepaddleenvironment.md b/docs/models/sourcepaddleenvironment.md new file mode 100644 index 00000000..361d48a0 --- /dev/null +++ b/docs/models/sourcepaddleenvironment.md @@ -0,0 +1,11 @@ +# SourcePaddleEnvironment + +The environment for the Paddle API, either 'sandbox' or 'live'. + + +## Values + +| Name | Value | +| ------------- | ------------- | +| `API` | api | +| `SANDBOX_API` | sandbox-api | \ No newline at end of file diff --git a/docs/models/sourcepagerduty.md b/docs/models/sourcepagerduty.md new file mode 100644 index 00000000..05fe36e8 --- /dev/null +++ b/docs/models/sourcepagerduty.md @@ -0,0 +1,16 @@ +# SourcePagerduty + + +## Fields + +| Field | Type | Required | Description | Example | +| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `token` | *str* | :heavy_check_mark: | API key for PagerDuty API authentication | | +| `cutoff_days` | *Optional[int]* | :heavy_minus_sign: | Fetch pipelines updated in the last number of days | | +| `default_severity` | *Optional[str]* | :heavy_minus_sign: | A default severity category if not present | Sev1 | +| `exclude_services` | List[*str*] | :heavy_minus_sign: | List of PagerDuty service names to ignore incidents from. If not set, all incidents will be pulled. | service-1 | +| `incident_log_entries_overview` | *Optional[bool]* | :heavy_minus_sign: | If true, will return a subset of log entries that show only the most important changes to the incident. | | +| `max_retries` | *Optional[int]* | :heavy_minus_sign: | Maximum number of PagerDuty API request retries to perform upon connection errors. The source will pause for an exponentially increasing number of seconds before retrying. | | +| `page_size` | *Optional[int]* | :heavy_minus_sign: | page size to use when querying PagerDuty API | | +| `service_details` | List[[models.ServiceDetails](../models/servicedetails.md)] | :heavy_minus_sign: | List of PagerDuty service additional details to include. | | +| `source_type` | [models.Pagerduty](../models/pagerduty.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourcepardot.md b/docs/models/sourcepardot.md index b55cf8bf..f9991e1d 100644 --- a/docs/models/sourcepardot.md +++ b/docs/models/sourcepardot.md @@ -10,5 +10,6 @@ | `pardot_business_unit_id` | *str* | :heavy_check_mark: | Pardot Business ID, can be found at Setup > Pardot > Pardot Account Setup | | | `refresh_token` | *str* | :heavy_check_mark: | Salesforce Refresh Token used for Airbyte to access your Salesforce account. If you don't know what this is, follow this guide to retrieve it. 
| | | `is_sandbox` | *Optional[bool]* | :heavy_minus_sign: | Whether or not the app is in a Salesforce sandbox. If you do not know what this is, assume it is false. | | +| `page_size` | *Optional[str]* | :heavy_minus_sign: | The maximum number of records to return per request | | | `source_type` | [models.Pardot](../models/pardot.md) | :heavy_check_mark: | N/A | | -| `start_date` | *Optional[str]* | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. Leave blank to skip this filter | 2021-07-25T00:00:00Z | \ No newline at end of file +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time in the format 2000-01-01T00:00:00Z. Any data before this date will not be replicated. Defaults to the year Pardot was released. | 2021-07-25T00:00:00Z | \ No newline at end of file diff --git a/docs/models/sourcepartnerize.md b/docs/models/sourcepartnerize.md new file mode 100644 index 00000000..a93fa538 --- /dev/null +++ b/docs/models/sourcepartnerize.md @@ -0,0 +1,10 @@ +# SourcePartnerize + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `application_key` | *str* | :heavy_check_mark: | The application key identifies the network you are making the request against. Find it in your account settings under 'User Application Key' at https://console.partnerize.com. | +| `user_api_key` | *str* | :heavy_check_mark: | The user API key identifies the user on whose behalf the request is made. Find it in your account settings under 'User API Key' at https://console.partnerize.com. | +| `source_type` | [models.Partnerize](../models/partnerize.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcepartnerstack.md b/docs/models/sourcepartnerstack.md new file mode 100644 index 00000000..9ee977cb --- /dev/null +++ b/docs/models/sourcepartnerstack.md @@ -0,0 +1,11 @@ +# SourcePartnerstack + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | +| `private_key` | *str* | :heavy_check_mark: | The Live Private Key for a Partnerstack account. | | +| `public_key` | *str* | :heavy_check_mark: | The Live Public Key for a Partnerstack account.
| | +| `source_type` | [models.Partnerstack](../models/partnerstack.md) | :heavy_check_mark: | N/A | | +| `start_date` | *Optional[str]* | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. | 2017-01-25T00:00:00Z | \ No newline at end of file diff --git a/docs/models/sourcepatchrequest.md b/docs/models/sourcepatchrequest.md index 8de36ba0..a9714abe 100644 --- a/docs/models/sourcepatchrequest.md +++ b/docs/models/sourcepatchrequest.md @@ -3,9 +3,10 @@ ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | -| `configuration` | [Optional[models.SourceConfiguration]](../models/sourceconfiguration.md) | :heavy_minus_sign: | The values required to configure the source. | {
"user": "charles"
} | -| `name` | *Optional[str]* | :heavy_minus_sign: | N/A | My source | -| `secret_id` | *Optional[str]* | :heavy_minus_sign: | Optional secretID obtained through the OAuth redirect flow. | | -| `workspace_id` | *Optional[str]* | :heavy_minus_sign: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `configuration` | [Optional[models.SourceConfiguration]](../models/sourceconfiguration.md) | :heavy_minus_sign: | The values required to configure the source. | {
"user": "charles"
} | +| `name` | *Optional[str]* | :heavy_minus_sign: | N/A | My source | +| `resource_allocation` | [Optional[models.ScopedResourceRequirements]](../models/scopedresourcerequirements.md) | :heavy_minus_sign: | Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level. | | +| `secret_id` | *Optional[str]* | :heavy_minus_sign: | Optional secretID obtained through the OAuth redirect flow. | | +| `workspace_id` | *Optional[str]* | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/sourcepayfit.md b/docs/models/sourcepayfit.md new file mode 100644 index 00000000..5a861f46 --- /dev/null +++ b/docs/models/sourcepayfit.md @@ -0,0 +1,10 @@ +# SourcePayfit + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------ | ------------------------------------ | ------------------------------------ | ------------------------------------ | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `company_id` | *str* | :heavy_check_mark: | N/A | +| `source_type` | [models.Payfit](../models/payfit.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceperigon.md b/docs/models/sourceperigon.md new file mode 100644 index 00000000..d35746c1 --- /dev/null +++ b/docs/models/sourceperigon.md @@ -0,0 +1,10 @@ +# SourcePerigon + + +## Fields + +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Your API key for authenticating with the Perigon API. Obtain it by creating an account at https://www.perigon.io/sign-up and verifying your email. The API key will be visible on your account dashboard.
| +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.Perigon](../models/perigon.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcephyllo.md b/docs/models/sourcephyllo.md new file mode 100644 index 00000000..0c77310a --- /dev/null +++ b/docs/models/sourcephyllo.md @@ -0,0 +1,12 @@ +# SourcePhyllo + + +## Fields + +| Field | Type | Required | Description | +| ----------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------- | +| `client_id` | *str* | :heavy_check_mark: | Your Client ID for the Phyllo API. You can find this in the Phyllo Developer Dashboard under API credentials. | +| `client_secret` | *str* | :heavy_check_mark: | Your Client Secret for the Phyllo API. You can find this in the Phyllo Developer Dashboard under API credentials. | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `environment` | [Optional[models.SourcePhylloEnvironment]](../models/sourcephylloenvironment.md) | :heavy_minus_sign: | The environment for the API (e.g., 'api.sandbox', 'api.staging', 'api') | +| `source_type` | [models.Phyllo](../models/phyllo.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcephylloenvironment.md b/docs/models/sourcephylloenvironment.md new file mode 100644 index 00000000..70ff2d93 --- /dev/null +++ b/docs/models/sourcephylloenvironment.md @@ -0,0 +1,12 @@ +# SourcePhylloEnvironment + +The environment for the API (e.g., 'api.sandbox', 'api.staging', 'api') + + +## Values + +| Name | Value | +| ------------- | ------------- | +| `API_SANDBOX` | api.sandbox | +| `API_STAGING` | api.staging | +| `API` | api | \ No newline at end of file diff --git a/docs/models/sourcepingdom.md b/docs/models/sourcepingdom.md new file mode 100644 index 00000000..b030e643 --- /dev/null +++ b/docs/models/sourcepingdom.md @@ -0,0 +1,12 @@ +# SourcePingdom + + +## Fields + +| Field | Type | Required | Description | Example | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | N/A | | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | | +| `probes` | *Optional[str]* | :heavy_minus_sign: | N/A | probe1 | +| `resolution` | [Optional[models.Resolution]](../models/resolution.md) | :heavy_minus_sign: | N/A | | +| `source_type` | [models.Pingdom](../models/pingdom.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourcepinterest.md b/docs/models/sourcepinterest.md index 258d524c..b056fc9e 100644 --- a/docs/models/sourcepinterest.md +++ b/docs/models/sourcepinterest.md @@ -5,6 +5,7 @@ | Field | Type | Required | Description | Example | | 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `account_id` | *Optional[str]* | :heavy_minus_sign: | The Pinterest account ID you want to fetch data for. This ID must be provided to filter the data for a specific account. | 1234567890 | | `credentials` | [Optional[models.OAuth20]](../models/oauth20.md) | :heavy_minus_sign: | N/A | | | `custom_reports` | List[[models.ReportConfig](../models/reportconfig.md)] | :heavy_minus_sign: | A list which contains ad statistics entries, each entry must have a name and can contain fields, breakdowns or action_breakdowns. Click on "add" to fill this field. | | | `source_type` | [Optional[models.SourcePinterestPinterest]](../models/sourcepinterestpinterest.md) | :heavy_minus_sign: | N/A | | diff --git a/docs/models/sourcepoplar.md b/docs/models/sourcepoplar.md new file mode 100644 index 00000000..2e938740 --- /dev/null +++ b/docs/models/sourcepoplar.md @@ -0,0 +1,10 @@ +# SourcePoplar + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `access_token` | *str* | :heavy_check_mark: | Your Poplar API Access Token. Generate it from the [API Credentials page](https://app.heypoplar.com/credentials) in your account. Use a production token for live data or a test token for testing purposes.
| +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.Poplar](../models/poplar.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceprintify.md b/docs/models/sourceprintify.md new file mode 100644 index 00000000..606ade5f --- /dev/null +++ b/docs/models/sourceprintify.md @@ -0,0 +1,9 @@ +# SourcePrintify + + +## Fields + +| Field | Type | Required | Description | +| ----------------------------------------------------------------------- | ----------------------------------------------------------------------- | ----------------------------------------------------------------------- | ----------------------------------------------------------------------- | +| `api_token` | *str* | :heavy_check_mark: | Your Printify API token. Obtain it from your Printify account settings. | +| `source_type` | [models.Printify](../models/printify.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceputrequest.md b/docs/models/sourceputrequest.md index c56214fe..2e7c0795 100644 --- a/docs/models/sourceputrequest.md +++ b/docs/models/sourceputrequest.md @@ -3,7 +3,8 @@ ## Fields -| Field | Type | Required | Description | Example | -| -------------------------------------------------------------- | -------------------------------------------------------------- | -------------------------------------------------------------- | -------------------------------------------------------------- | -------------------------------------------------------------- | -| `configuration` | [models.SourceConfiguration](../models/sourceconfiguration.md) | :heavy_check_mark: | The values required to configure the source. | {
"user": "charles"
} | -| `name` | *str* | :heavy_check_mark: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `configuration` | [models.SourceConfiguration](../models/sourceconfiguration.md) | :heavy_check_mark: | The values required to configure the source. | {
"user": "charles"
} | +| `name` | *str* | :heavy_check_mark: | N/A | | +| `resource_allocation` | [Optional[models.ScopedResourceRequirements]](../models/scopedresourcerequirements.md) | :heavy_minus_sign: | Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level. | | \ No newline at end of file diff --git a/docs/models/sourcequickbooks.md b/docs/models/sourcequickbooks.md index 03eed693..90ad7d59 100644 --- a/docs/models/sourcequickbooks.md +++ b/docs/models/sourcequickbooks.md @@ -3,9 +3,15 @@ ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `credentials` | [models.SourceQuickbooksAuthorizationMethod](../models/sourcequickbooksauthorizationmethod.md) | :heavy_check_mark: | N/A | | -| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | The default value to use if no bookmark exists for an endpoint (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this date will not be replicated. | 2021-03-20T00:00:00Z | -| `sandbox` | *Optional[bool]* | :heavy_minus_sign: | Determines whether to use the sandbox or production environment. | | -| `source_type` | [models.Quickbooks](../models/quickbooks.md) | :heavy_check_mark: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `access_token` | *str* | :heavy_check_mark: | Access token for making authenticated requests.
| | +| `client_id` | *str* | :heavy_check_mark: | Identifies which app is making the request. Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production. | | +| `client_secret` | *str* | :heavy_check_mark: | Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production. | | +| `realm_id` | *str* | :heavy_check_mark: | Labeled Company ID. The Make API Calls panel is populated with the realm id and the current access token. | | +| `refresh_token` | *str* | :heavy_check_mark: | A token used when refreshing the access token. | | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | The default value to use if no bookmark exists for an endpoint (rfc3339 date string). E.g., 2021-03-20T00:00:00Z. Any data before this date will not be replicated. | 2021-03-20T00:00:00Z | +| `token_expiry_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | The date-time when the access token should be refreshed. | | +| `auth_type` | [Optional[models.SourceQuickbooksAuthType]](../models/sourcequickbooksauthtype.md) | :heavy_minus_sign: | N/A | | +| `sandbox` | *Optional[bool]* | :heavy_minus_sign: | Determines whether to use the sandbox or production environment. | | +| `source_type` | [models.Quickbooks](../models/quickbooks.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourcequickbooksauthorizationmethod.md b/docs/models/sourcequickbooksauthorizationmethod.md deleted file mode 100644 index 462c218d..00000000 --- a/docs/models/sourcequickbooksauthorizationmethod.md +++ /dev/null @@ -1,11 +0,0 @@ -# SourceQuickbooksAuthorizationMethod - - -## Supported Types - -### `models.SourceQuickbooksOAuth20` - -```python -value: models.SourceQuickbooksOAuth20 = /* values here */ -``` - diff --git a/docs/models/sourcerecharge.md b/docs/models/sourcerecharge.md index 0387e544..b819b552 100644 --- a/docs/models/sourcerecharge.md +++ b/docs/models/sourcerecharge.md @@ -7,5 +7,6 @@ | -------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------- | | `access_token` | *str* | :heavy_check_mark: | The value of the Access Token generated. See the docs for more information. | | | `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | The date from which you'd like to replicate data for Recharge API, in the format YYYY-MM-DDT00:00:00Z. Any data before this date will not be replicated.
| 2021-05-14T00:00:00Z | +| `lookback_window_days` | *Optional[int]* | :heavy_minus_sign: | Specifies how many days of historical data should be reloaded each time the recharge connector runs. | | | `source_type` | [models.Recharge](../models/recharge.md) | :heavy_check_mark: | N/A | | | `use_orders_deprecated_api` | *Optional[bool]* | :heavy_minus_sign: | Define whether or not the `Orders` stream should use the deprecated `2021-01` API version, or use `2021-11`, otherwise. | | \ No newline at end of file diff --git a/docs/models/sourcerecurly.md b/docs/models/sourcerecurly.md index 00158959..58e3b1d6 100644 --- a/docs/models/sourcerecurly.md +++ b/docs/models/sourcerecurly.md @@ -6,6 +6,9 @@ | Field | Type | Required | Description | Example | | ---------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------- | | `api_key` | *str* | :heavy_check_mark: | Recurly API Key. See the docs for more information on how to generate this key. | | -| `begin_time` | *Optional[str]* | :heavy_minus_sign: | ISO8601 timestamp from which the replication from Recurly API will start from. | 2021-12-01T00:00:00 | -| `end_time` | *Optional[str]* | :heavy_minus_sign: | ISO8601 timestamp to which the replication from Recurly API will stop. Records after that date won't be imported. | 2021-12-01T00:00:00 | +| `accounts_step_days` | *Optional[int]* | :heavy_minus_sign: | Days in length for each API call to get data from the accounts stream. Smaller values will result in more API calls but better concurrency. | 7 | +| `begin_time` | *Optional[str]* | :heavy_minus_sign: | ISO8601 timestamp from which the replication from Recurly API will start from. | 2021-12-01T00:00:00Z | +| `end_time` | *Optional[str]* | :heavy_minus_sign: | ISO8601 timestamp to which the replication from Recurly API will stop. Records after that date won't be imported. | 2021-12-01T00:00:00Z | +| `is_sandbox` | *Optional[bool]* | :heavy_minus_sign: | Set to true for sandbox accounts (400 requests/min, all types). Defaults to false for production accounts (1,000 GET requests/min). | | +| `num_workers` | *Optional[int]* | :heavy_minus_sign: | The number of worker threads to use for the sync. | 1 | | `source_type` | [models.Recurly](../models/recurly.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourceresponse.md b/docs/models/sourceresponse.md index 8755ff78..e45a2011 100644 --- a/docs/models/sourceresponse.md +++ b/docs/models/sourceresponse.md @@ -5,12 +5,13 @@ Provides details of a single source. 
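For orientation, a hedged sketch of fetching one of these responses through the SDK; the client entry point (`airbyte_api.AirbyteAPI`), the security model, and the `api.GetSourceRequest` class are assumed to follow the patterns in the SDK's USAGE docs, and the token and ID are placeholders:

```python
import airbyte_api
from airbyte_api import api, models

s = airbyte_api.AirbyteAPI(
    security=models.Security(bearer_auth='<YOUR_BEARER_TOKEN>'),
)

res = s.sources.get_source(request=api.GetSourceRequest(source_id='<SOURCE_ID>'))
if res.source_response is not None:
    # resource_allocation is a newly added optional field and may be None.
    print(res.source_response.name, res.source_response.resource_allocation)
```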
## Fields -| Field | Type | Required | Description | Example | -| -------------------------------------------------------------- | -------------------------------------------------------------- | -------------------------------------------------------------- | -------------------------------------------------------------- | -------------------------------------------------------------- | -| `configuration` | [models.SourceConfiguration](../models/sourceconfiguration.md) | :heavy_check_mark: | The values required to configure the source. | {
"user": "charles"
} | -| `created_at` | *int* | :heavy_check_mark: | N/A | | -| `definition_id` | *str* | :heavy_check_mark: | N/A | | -| `name` | *str* | :heavy_check_mark: | N/A | | -| `source_id` | *str* | :heavy_check_mark: | N/A | | -| `source_type` | *str* | :heavy_check_mark: | N/A | | -| `workspace_id` | *str* | :heavy_check_mark: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `configuration` | [models.SourceConfiguration](../models/sourceconfiguration.md) | :heavy_check_mark: | The values required to configure the source. | {
"user": "charles"
} | +| `created_at` | *int* | :heavy_check_mark: | N/A | | +| `definition_id` | *str* | :heavy_check_mark: | N/A | | +| `name` | *str* | :heavy_check_mark: | N/A | | +| `source_id` | *str* | :heavy_check_mark: | N/A | | +| `source_type` | *str* | :heavy_check_mark: | N/A | | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | | +| `resource_allocation` | [Optional[models.ScopedResourceRequirements]](../models/scopedresourcerequirements.md) | :heavy_minus_sign: | Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level. | | \ No newline at end of file diff --git a/docs/models/sourceretailexpressbymaropost.md b/docs/models/sourceretailexpressbymaropost.md new file mode 100644 index 00000000..5275ebd1 --- /dev/null +++ b/docs/models/sourceretailexpressbymaropost.md @@ -0,0 +1,10 @@ +# SourceRetailexpressByMaropost + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | ---------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.RetailexpressByMaropost](../models/retailexpressbymaropost.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceringcentral.md b/docs/models/sourceringcentral.md new file mode 100644 index 00000000..6e802982 --- /dev/null +++ b/docs/models/sourceringcentral.md @@ -0,0 +1,11 @@ +# SourceRingcentral + + +## Fields + +| Field | Type | Required | Description | +| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `account_id` | *str* | :heavy_check_mark: | Can be found in the response to a basic API call to an endpoint with the ~ operator. Example: (https://platform.devtest.ringcentral.com/restapi/v1.0/account/~/extension/~/business-hours)
| +| `auth_token` | *str* | :heavy_check_mark: | The token can be received by following the instructions at https://developers.ringcentral.com/api-reference/authentication | +| `extension_id` | *str* | :heavy_check_mark: | Can be found in the response to a basic API call to an endpoint with the ~ operator. Example: (https://platform.devtest.ringcentral.com/restapi/v1.0/account/~/extension/~/business-hours)
| +| `source_type` | [models.Ringcentral](../models/ringcentral.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcerocketchat.md b/docs/models/sourcerocketchat.md new file mode 100644 index 00000000..cba5787d --- /dev/null +++ b/docs/models/sourcerocketchat.md @@ -0,0 +1,11 @@ +# SourceRocketChat + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `endpoint` | *str* | :heavy_check_mark: | Your rocket.chat instance URL. | https://airbyte-connector-poc.rocket.chat | +| `token` | *str* | :heavy_check_mark: | Your API Token. See here. The token is case sensitive. | | +| `user_id` | *str* | :heavy_check_mark: | Your User Id. | | +| `source_type` | [models.RocketChat](../models/rocketchat.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sources3.md b/docs/models/sources3.md index 9e429948..0f9ef9b2 100644 --- a/docs/models/sources3.md +++ b/docs/models/sources3.md @@ -12,7 +12,7 @@ because it is responsible for converting legacy S3 v3 configs into v4 configs us | `streams` | List[[models.SourceS3FileBasedStreamConfig](../models/sources3filebasedstreamconfig.md)] | :heavy_check_mark: | Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table. | | | `aws_access_key_id` | *Optional[str]* | :heavy_minus_sign: | In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. | | | `aws_secret_access_key` | *Optional[str]* | :heavy_minus_sign: | In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. | | -| `delivery_method` | [Optional[models.DeliveryMethod]](../models/deliverymethod.md) | :heavy_minus_sign: | N/A | | +| `delivery_method` | [Optional[models.SourceS3DeliveryMethod]](../models/sources3deliverymethod.md) | :heavy_minus_sign: | N/A | | | `endpoint` | *Optional[str]* | :heavy_minus_sign: | Endpoint to an S3 compatible service. Leave empty to use AWS. | my-s3-endpoint.com | | `region_name` | *Optional[str]* | :heavy_minus_sign: | AWS region where the S3 bucket is located. If not provided, the region will be determined automatically. 
| | | `role_arn` | *Optional[str]* | :heavy_minus_sign: | Specifies the Amazon Resource Name (ARN) of an IAM role that you want to use to perform operations requested using this profile. Set the External ID to the Airbyte workspace ID, which can be found in the URL of this page. | | diff --git a/docs/models/sources3copyrawfiles.md b/docs/models/sources3copyrawfiles.md new file mode 100644 index 00000000..54bfec10 --- /dev/null +++ b/docs/models/sources3copyrawfiles.md @@ -0,0 +1,11 @@ +# SourceS3CopyRawFiles + +Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `delivery_type` | [Optional[models.SourceS3SchemasDeliveryType]](../models/sources3schemasdeliverytype.md) | :heavy_minus_sign: | N/A | +| `preserve_directory_structure` | *Optional[bool]* | :heavy_minus_sign: | If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. | \ No newline at end of file diff --git a/docs/models/sources3deliverymethod.md b/docs/models/sources3deliverymethod.md new file mode 100644 index 00000000..47486f1e --- /dev/null +++ b/docs/models/sources3deliverymethod.md @@ -0,0 +1,17 @@ +# SourceS3DeliveryMethod + + +## Supported Types + +### `models.SourceS3ReplicateRecords` + +```python +value: models.SourceS3ReplicateRecords = /* values here */ +``` + +### `models.SourceS3CopyRawFiles` + +```python +value: models.SourceS3CopyRawFiles = /* values here */ +``` + diff --git a/docs/models/sources3deliverytype.md b/docs/models/sources3deliverytype.md index 741fd2c5..e17c1d2b 100644 --- a/docs/models/sources3deliverytype.md +++ b/docs/models/sources3deliverytype.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ------------------- | ------------------- | -| `USE_FILE_TRANSFER` | use_file_transfer | \ No newline at end of file +| Name | Value | +| ---------------------- | ---------------------- | +| `USE_RECORDS_TRANSFER` | use_records_transfer | \ No newline at end of file diff --git a/docs/models/sources3replicaterecords.md b/docs/models/sources3replicaterecords.md new file mode 100644 index 00000000..f5fa1873 --- /dev/null +++ b/docs/models/sources3replicaterecords.md @@ -0,0 +1,10 @@ +# SourceS3ReplicateRecords + +Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. 
It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination. + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------- | +| `delivery_type` | [Optional[models.SourceS3DeliveryType]](../models/sources3deliverytype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sources3schemasdeliverytype.md b/docs/models/sources3schemasdeliverytype.md new file mode 100644 index 00000000..c943d862 --- /dev/null +++ b/docs/models/sources3schemasdeliverytype.md @@ -0,0 +1,8 @@ +# SourceS3SchemasDeliveryType + + +## Values + +| Name | Value | +| ------------------- | ------------------- | +| `USE_FILE_TRANSFER` | use_file_transfer | \ No newline at end of file diff --git a/docs/models/sourcesalesforce.md b/docs/models/sourcesalesforce.md index b0af206c..1f96aefe 100644 --- a/docs/models/sourcesalesforce.md +++ b/docs/models/sourcesalesforce.md @@ -8,7 +8,7 @@ | `client_id` | *str* | :heavy_check_mark: | Enter your Salesforce developer application's Client ID | | | `client_secret` | *str* | :heavy_check_mark: | Enter your Salesforce developer application's Client secret | | | `refresh_token` | *str* | :heavy_check_mark: | Enter your application's Salesforce Refresh Token used for Airbyte to access your Salesforce account. | | -| `auth_type` | [Optional[models.AuthType]](../models/authtype.md) | :heavy_minus_sign: | N/A | | +| `auth_type` | [Optional[models.SourceSalesforceAuthType]](../models/sourcesalesforceauthtype.md) | :heavy_minus_sign: | N/A | | | `force_use_bulk_api` | *Optional[bool]* | :heavy_minus_sign: | Toggle to use Bulk API (this might cause empty fields for some streams) | | | `is_sandbox` | *Optional[bool]* | :heavy_minus_sign: | Toggle if you're using a Salesforce Sandbox | | | `source_type` | [models.SourceSalesforceSalesforce](../models/sourcesalesforcesalesforce.md) | :heavy_check_mark: | N/A | | diff --git a/docs/models/sourcesalesforceauthtype.md b/docs/models/sourcesalesforceauthtype.md new file mode 100644 index 00000000..2b894b47 --- /dev/null +++ b/docs/models/sourcesalesforceauthtype.md @@ -0,0 +1,8 @@ +# SourceSalesforceAuthType + + +## Values + +| Name | Value | +| -------- | -------- | +| `CLIENT` | Client | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterprise.md b/docs/models/sourcesaphanaenterprise.md new file mode 100644 index 00000000..5f502e26 --- /dev/null +++ b/docs/models/sourcesaphanaenterprise.md @@ -0,0 +1,20 @@ +# SourceSapHanaEnterprise + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | 
------------- | ------------- | ------------- | +| `cursor` | [models.SourceSapHanaEnterpriseUpdateMethod](../models/sourcesaphanaenterpriseupdatemethod.md) | :heavy_check_mark: | Configures how data is extracted from the database. | +| `encryption` | [models.SourceSapHanaEnterpriseEncryption](../models/sourcesaphanaenterpriseencryption.md) | :heavy_check_mark: | The encryption method which is used when communicating with the database. | +| `host` | *str* | :heavy_check_mark: | Hostname of the database. | +| `tunnel_method` | [models.SourceSapHanaEnterpriseSSHTunnelMethod](../models/sourcesaphanaenterprisesshtunnelmethod.md) | :heavy_check_mark: | Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. | +| `username` | *str* | :heavy_check_mark: | The username which is used to access the database. | +| `check_privileges` | *Optional[bool]* | :heavy_minus_sign: | When this feature is enabled, during schema discovery the connector will query each table or view individually to check access privileges; inaccessible tables, views, or columns will be removed. In large schemas, this might cause schema discovery to take too long, in which case it might be advisable to disable this feature. | +| `checkpoint_target_interval_seconds` | *Optional[int]* | :heavy_minus_sign: | How often (in seconds) a stream should checkpoint, when possible. | +| `concurrency` | *Optional[int]* | :heavy_minus_sign: | Maximum number of concurrent queries to the database. | +| `jdbc_url_params` | *Optional[str]* | :heavy_minus_sign: | Additional properties to pass to the JDBC URL string when connecting to the database, formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). | +| `password` | *Optional[str]* | :heavy_minus_sign: | The password associated with the username. | +| `port` | *Optional[int]* | :heavy_minus_sign: | Port of the database.
SAP recommends the following port numbers:
443 - Default listening port for SAP HANA cloud client connections to the listener. | +| `schemas` | List[*str*] | :heavy_minus_sign: | The list of schemas to sync from. Defaults to user. Case sensitive. | +| `source_type` | [models.SapHanaEnterprise](../models/saphanaenterprise.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterprisecursormethod.md b/docs/models/sourcesaphanaenterprisecursormethod.md new file mode 100644 index 00000000..b5e1ef09 --- /dev/null +++ b/docs/models/sourcesaphanaenterprisecursormethod.md @@ -0,0 +1,8 @@ +# SourceSapHanaEnterpriseCursorMethod + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `USER_DEFINED` | user_defined | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterpriseencryption.md b/docs/models/sourcesaphanaenterpriseencryption.md new file mode 100644 index 00000000..c7fff0e5 --- /dev/null +++ b/docs/models/sourcesaphanaenterpriseencryption.md @@ -0,0 +1,25 @@ +# SourceSapHanaEnterpriseEncryption + +The encryption method which is used when communicating with the database. + + +## Supported Types + +### `models.SourceSapHanaEnterpriseUnencrypted` + +```python +value: models.SourceSapHanaEnterpriseUnencrypted = /* values here */ +``` + +### `models.SourceSapHanaEnterpriseNativeNetworkEncryptionNNE` + +```python +value: models.SourceSapHanaEnterpriseNativeNetworkEncryptionNNE = /* values here */ +``` + +### `models.SourceSapHanaEnterpriseTLSEncryptedVerifyCertificate` + +```python +value: models.SourceSapHanaEnterpriseTLSEncryptedVerifyCertificate = /* values here */ +``` + diff --git a/docs/models/sourcesaphanaenterpriseencryptionalgorithm.md b/docs/models/sourcesaphanaenterpriseencryptionalgorithm.md new file mode 100644 index 00000000..6c3b5712 --- /dev/null +++ b/docs/models/sourcesaphanaenterpriseencryptionalgorithm.md @@ -0,0 +1,12 @@ +# SourceSapHanaEnterpriseEncryptionAlgorithm + +This parameter defines what encryption algorithm is used. + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `AES256` | AES256 | +| `RC4_56` | RC4_56 | +| `THREE_DES168` | 3DES168 | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterpriseencryptionmethod.md b/docs/models/sourcesaphanaenterpriseencryptionmethod.md new file mode 100644 index 00000000..65c9d42e --- /dev/null +++ b/docs/models/sourcesaphanaenterpriseencryptionmethod.md @@ -0,0 +1,8 @@ +# SourceSapHanaEnterpriseEncryptionMethod + + +## Values + +| Name | Value | +| ------------- | ------------- | +| `UNENCRYPTED` | unencrypted | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterpriseinvalidcdcpositionbehavioradvanced.md b/docs/models/sourcesaphanaenterpriseinvalidcdcpositionbehavioradvanced.md new file mode 100644 index 00000000..598bb369 --- /dev/null +++ b/docs/models/sourcesaphanaenterpriseinvalidcdcpositionbehavioradvanced.md @@ -0,0 +1,11 @@ +# SourceSapHanaEnterpriseInvalidCDCPositionBehaviorAdvanced + +Determines whether Airbyte should fail or re-sync data in case of a stale/invalid cursor value in the mined logs. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.
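+For illustration, a minimal sketch of selecting the conservative behavior; it assumes this enum is importable from `airbyte_api.models` like the other generated types in these docs, with member names following the Values table below:
+
+```python
+from airbyte_api import models
+
+# Fail the sync on a stale CDC cursor instead of silently triggering a
+# (potentially costly) automatic re-sync.
+behavior = models.SourceSapHanaEnterpriseInvalidCDCPositionBehaviorAdvanced.FAIL_SYNC
+```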
+ + +## Values + +| Name | Value | +| -------------- | -------------- | +| `FAIL_SYNC` | Fail sync | +| `RE_SYNC_DATA` | Re-sync data | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterprisenativenetworkencryptionnne.md b/docs/models/sourcesaphanaenterprisenativenetworkencryptionnne.md new file mode 100644 index 00000000..b6396949 --- /dev/null +++ b/docs/models/sourcesaphanaenterprisenativenetworkencryptionnne.md @@ -0,0 +1,12 @@ +# SourceSapHanaEnterpriseNativeNetworkEncryptionNNE + +The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------ | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `encryption_algorithm` | [Optional[models.SourceSapHanaEnterpriseEncryptionAlgorithm]](../models/sourcesaphanaenterpriseencryptionalgorithm.md) | :heavy_minus_sign: | This parameter defines what encryption algorithm is used. | +| `encryption_method` | [Optional[models.SourceSapHanaEnterpriseSchemasEncryptionMethod]](../models/sourcesaphanaenterpriseschemasencryptionmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterprisenotunnel.md b/docs/models/sourcesaphanaenterprisenotunnel.md new file mode 100644 index 00000000..a1ba82cf --- /dev/null +++ b/docs/models/sourcesaphanaenterprisenotunnel.md @@ -0,0 +1,11 @@ +# SourceSapHanaEnterpriseNoTunnel + +No ssh tunnel needed to connect to database + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `tunnel_method` | [Optional[models.SourceSapHanaEnterpriseTunnelMethod]](../models/sourcesaphanaenterprisetunnelmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterprisepasswordauthentication.md b/docs/models/sourcesaphanaenterprisepasswordauthentication.md new file mode 100644 index 00000000..730595b4 --- /dev/null +++ b/docs/models/sourcesaphanaenterprisepasswordauthentication.md @@ -0,0 +1,15 @@ +# SourceSapHanaEnterprisePasswordAuthentication + +Connect through a jump server tunnel host using username and password authentication + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------------------------------------------- | 
---------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------- | +| `tunnel_host` | *str* | :heavy_check_mark: | Hostname of the jump server host that allows inbound ssh tunnel. | +| `tunnel_user` | *str* | :heavy_check_mark: | OS-level username for logging into the jump server host | +| `tunnel_user_password` | *str* | :heavy_check_mark: | OS-level password for logging into the jump server host | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `tunnel_method` | [Optional[models.SourceSapHanaEnterpriseSchemasTunnelMethodTunnelMethod]](../models/sourcesaphanaenterpriseschemastunnelmethodtunnelmethod.md) | :heavy_minus_sign: | N/A | +| `tunnel_port` | *Optional[int]* | :heavy_minus_sign: | Port on the proxy/jump server that accepts inbound ssh connections. | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterprisereadchangesusingchangedatacapturecdc.md b/docs/models/sourcesaphanaenterprisereadchangesusingchangedatacapturecdc.md new file mode 100644 index 00000000..82fb7ef1 --- /dev/null +++ b/docs/models/sourcesaphanaenterprisereadchangesusingchangedatacapturecdc.md @@ -0,0 +1,13 @@ +# SourceSapHanaEnterpriseReadChangesUsingChangeDataCaptureCDC + +Recommended - Incrementally reads new inserts, updates, and deletes using change data capture feature. This must be enabled on your database. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `cursor_method` | [Optional[models.SourceSapHanaEnterpriseSchemasCursorMethod]](../models/sourcesaphanaenterpriseschemascursormethod.md) | :heavy_minus_sign: | N/A | +| `initial_load_timeout_hours` | *Optional[int]* | 
:heavy_minus_sign: | The amount of time an initial load is allowed to run before catching up on CDC events. | +| `invalid_cdc_cursor_position_behavior` | [Optional[models.SourceSapHanaEnterpriseInvalidCDCPositionBehaviorAdvanced]](../models/sourcesaphanaenterpriseinvalidcdcpositionbehavioradvanced.md) | :heavy_minus_sign: | Determines whether Airbyte should fail or re-sync data in case of a stale/invalid cursor value in the mined logs. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss. | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterprisescanchangeswithuserdefinedcursor.md b/docs/models/sourcesaphanaenterprisescanchangeswithuserdefinedcursor.md new file mode 100644 index 00000000..ae130e13 --- /dev/null +++ b/docs/models/sourcesaphanaenterprisescanchangeswithuserdefinedcursor.md @@ -0,0 +1,11 @@ +# SourceSapHanaEnterpriseScanChangesWithUserDefinedCursor + +Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). + + +## Fields + +| Field | Type | Required | Description | +| ------------- | ------------- | ------------- | ------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `cursor_method` | [Optional[models.SourceSapHanaEnterpriseCursorMethod]](../models/sourcesaphanaenterprisecursormethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterpriseschemascursormethod.md b/docs/models/sourcesaphanaenterpriseschemascursormethod.md new file mode 100644 index 00000000..bc58f51e --- /dev/null +++ b/docs/models/sourcesaphanaenterpriseschemascursormethod.md @@ -0,0 +1,8 @@ +# SourceSapHanaEnterpriseSchemasCursorMethod + + +## Values + +| Name | Value | +| ----- | ----- | +| `CDC` | cdc | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterpriseschemasencryptionencryptionmethod.md b/docs/models/sourcesaphanaenterpriseschemasencryptionencryptionmethod.md new file mode 100644 index 00000000..bbd5f71d --- /dev/null +++ b/docs/models/sourcesaphanaenterpriseschemasencryptionencryptionmethod.md @@ -0,0 +1,8 @@ +# SourceSapHanaEnterpriseSchemasEncryptionEncryptionMethod + + +## Values + +| Name | Value | +| ------------------------------ | ------------------------------ | +| `ENCRYPTED_VERIFY_CERTIFICATE` | encrypted_verify_certificate | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterpriseschemasencryptionmethod.md b/docs/models/sourcesaphanaenterpriseschemasencryptionmethod.md new file mode 100644 index 00000000..96e0ec73 --- /dev/null +++ b/docs/models/sourcesaphanaenterpriseschemasencryptionmethod.md @@ -0,0 +1,8 @@ +# SourceSapHanaEnterpriseSchemasEncryptionMethod + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `CLIENT_NNE` | client_nne | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterpriseschemastunnelmethod.md 
b/docs/models/sourcesaphanaenterpriseschemastunnelmethod.md new file mode 100644 index 00000000..fa13b702 --- /dev/null +++ b/docs/models/sourcesaphanaenterpriseschemastunnelmethod.md @@ -0,0 +1,8 @@ +# SourceSapHanaEnterpriseSchemasTunnelMethod + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `SSH_KEY_AUTH` | SSH_KEY_AUTH | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterpriseschemastunnelmethodtunnelmethod.md b/docs/models/sourcesaphanaenterpriseschemastunnelmethodtunnelmethod.md new file mode 100644 index 00000000..95b5c37f --- /dev/null +++ b/docs/models/sourcesaphanaenterpriseschemastunnelmethodtunnelmethod.md @@ -0,0 +1,8 @@ +# SourceSapHanaEnterpriseSchemasTunnelMethodTunnelMethod + + +## Values + +| Name | Value | +| ------------------- | ------------------- | +| `SSH_PASSWORD_AUTH` | SSH_PASSWORD_AUTH | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterprisesshkeyauthentication.md b/docs/models/sourcesaphanaenterprisesshkeyauthentication.md new file mode 100644 index 00000000..b1b41dd7 --- /dev/null +++ b/docs/models/sourcesaphanaenterprisesshkeyauthentication.md @@ -0,0 +1,15 @@ +# SourceSapHanaEnterpriseSSHKeyAuthentication + +Connect through a jump server tunnel host using username and ssh key + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | +| `ssh_key` | *str* | :heavy_check_mark: | OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa ) | +| `tunnel_host` | *str* | :heavy_check_mark: | Hostname of the jump server host that allows inbound ssh tunnel. | +| `tunnel_user` | *str* | :heavy_check_mark: | OS-level username for logging into the jump server host | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `tunnel_method` | [Optional[models.SourceSapHanaEnterpriseSchemasTunnelMethod]](../models/sourcesaphanaenterpriseschemastunnelmethod.md) | :heavy_minus_sign: | N/A | +| `tunnel_port` | *Optional[int]* | :heavy_minus_sign: | Port on the proxy/jump server that accepts inbound ssh connections. | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterprisesshtunnelmethod.md b/docs/models/sourcesaphanaenterprisesshtunnelmethod.md new file mode 100644 index 00000000..05b08f64 --- /dev/null +++ b/docs/models/sourcesaphanaenterprisesshtunnelmethod.md @@ -0,0 +1,25 @@ +# SourceSapHanaEnterpriseSSHTunnelMethod + +Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. 
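+As a hedged sketch, one member of this union could be built from the password-authentication model documented above; the field names follow its Fields table, and the import path is assumed to match the rest of this SDK:
+
+```python
+from airbyte_api import models
+
+# Placeholder values; tunnel_method and tunnel_port are optional per the docs.
+tunnel = models.SourceSapHanaEnterprisePasswordAuthentication(
+    tunnel_host="jump.example.com",
+    tunnel_user="airbyte",
+    tunnel_user_password="<password>",
+    tunnel_port=22,
+)
+```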
+ + +## Supported Types + +### `models.SourceSapHanaEnterpriseNoTunnel` + +```python +value: models.SourceSapHanaEnterpriseNoTunnel = /* values here */ +``` + +### `models.SourceSapHanaEnterpriseSSHKeyAuthentication` + +```python +value: models.SourceSapHanaEnterpriseSSHKeyAuthentication = /* values here */ +``` + +### `models.SourceSapHanaEnterprisePasswordAuthentication` + +```python +value: models.SourceSapHanaEnterprisePasswordAuthentication = /* values here */ +``` + diff --git a/docs/models/sourcesaphanaenterprisetlsencryptedverifycertificate.md b/docs/models/sourcesaphanaenterprisetlsencryptedverifycertificate.md new file mode 100644 index 00000000..0fb8d711 --- /dev/null +++ b/docs/models/sourcesaphanaenterprisetlsencryptedverifycertificate.md @@ -0,0 +1,12 @@ +# SourceSapHanaEnterpriseTLSEncryptedVerifyCertificate + +Verify and use the certificate provided by the server. + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- | +| `ssl_certificate` | *str* | :heavy_check_mark: | Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations. | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `encryption_method` | [Optional[models.SourceSapHanaEnterpriseSchemasEncryptionEncryptionMethod]](../models/sourcesaphanaenterpriseschemasencryptionencryptionmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterprisetunnelmethod.md b/docs/models/sourcesaphanaenterprisetunnelmethod.md new file mode 100644 index 00000000..a0ad8370 --- /dev/null +++ b/docs/models/sourcesaphanaenterprisetunnelmethod.md @@ -0,0 +1,8 @@ +# SourceSapHanaEnterpriseTunnelMethod + + +## Values + +| Name | Value | +| ----------- | ----------- | +| `NO_TUNNEL` | NO_TUNNEL | \ No newline at end of file diff --git a/docs/models/destinations3gluegzip.md b/docs/models/sourcesaphanaenterpriseunencrypted.md similarity index 62% rename from docs/models/destinations3gluegzip.md rename to docs/models/sourcesaphanaenterpriseunencrypted.md index 48fe1ebb..f8c4d296 100644 --- a/docs/models/destinations3gluegzip.md +++ b/docs/models/sourcesaphanaenterpriseunencrypted.md @@ -1,8 +1,11 @@ -# DestinationS3GlueGZIP +# SourceSapHanaEnterpriseUnencrypted + +Data transfer will not be encrypted. 
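+A minimal sketch, assuming the generated constructor takes no required arguments (both fields in the table below are optional):
+
+```python
+from airbyte_api import models
+
+# Opt out of transport encryption; only advisable on trusted networks.
+encryption = models.SourceSapHanaEnterpriseUnencrypted()
+```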
## Fields | Field | Type | Required | Description | | ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------- | -| `compression_type` | [Optional[models.DestinationS3GlueSchemasCompressionType]](../models/destinations3glueschemascompressiontype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `encryption_method` | [Optional[models.SourceSapHanaEnterpriseEncryptionMethod]](../models/sourcesaphanaenterpriseencryptionmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterpriseupdatemethod.md b/docs/models/sourcesaphanaenterpriseupdatemethod.md new file mode 100644 index 00000000..56d6d7b0 --- /dev/null +++ b/docs/models/sourcesaphanaenterpriseupdatemethod.md @@ -0,0 +1,19 @@ +# SourceSapHanaEnterpriseUpdateMethod + +Configures how data is extracted from the database. + + +## Supported Types + +### `models.SourceSapHanaEnterpriseScanChangesWithUserDefinedCursor` + +```python +value: models.SourceSapHanaEnterpriseScanChangesWithUserDefinedCursor = /* values here */ +``` + +### `models.SourceSapHanaEnterpriseReadChangesUsingChangeDataCaptureCDC` + +```python +value: models.SourceSapHanaEnterpriseReadChangesUsingChangeDataCaptureCDC = /* values here */ +``` + diff --git a/docs/models/sourceserpstat.md b/docs/models/sourceserpstat.md new file mode 100644 index 00000000..50cc2ce0 --- /dev/null +++ b/docs/models/sourceserpstat.md @@ -0,0 +1,18 @@ +# SourceSerpstat + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `api_key` | *str* | :heavy_check_mark: | Serpstat API key can be found here: https://serpstat.com/users/profile/ | +| `domain` | *Optional[str]* | :heavy_minus_sign: | The domain name to get data for (ex. serpstat.com) | +| `domains` | List[*Any*] | :heavy_minus_sign: | The list of domains that will be used in streams that support batch operations | +| `filter_by` | *Optional[str]* | :heavy_minus_sign: | The field name by which the results should be filtered. Filtering the results will result in fewer API credits spent. Each stream has different filtering options. See https://serpstat.com/api/ for more details. 
| +| `filter_value` | *Optional[str]* | :heavy_minus_sign: | The value of the field to filter by. Each stream has different filtering options. See https://serpstat.com/api/ for more details. | +| `page_size` | *Optional[int]* | :heavy_minus_sign: | The number of data rows per page to be returned. Each data row can contain multiple data points. The max value is 1000. Reducing the size of the page will result in fewer API credits spent. | +| `pages_to_fetch` | *Optional[int]* | :heavy_minus_sign: | The number of pages that should be fetched. All results will be obtained if left blank. Reducing the number of pages will result in fewer API credits spent. | +| `region_id` | *Optional[str]* | :heavy_minus_sign: | The ID of a region to get data from in the form of a two-letter country code prepended with the g_ prefix. See the list of supported region IDs here: https://serpstat.com/api/664-request-parameters-v4/. | +| `sort_by` | *Optional[str]* | :heavy_minus_sign: | The field name by which the results should be sorted. Each stream has different sorting options. See https://serpstat.com/api/ for more details. | +| `sort_value` | *Optional[str]* | :heavy_minus_sign: | The value of the field to sort by. Each stream has different sorting options. See https://serpstat.com/api/ for more details. | +| `source_type` | [models.Serpstat](../models/serpstat.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceservicenow.md b/docs/models/sourceservicenow.md new file mode 100644 index 00000000..f3ca5126 --- /dev/null +++ b/docs/models/sourceservicenow.md @@ -0,0 +1,11 @@ +# SourceServiceNow + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------- | -------------------------------------------- | -------------------------------------------- | -------------------------------------------- | +| `base_url` | *str* | :heavy_check_mark: | N/A | +| `username` | *str* | :heavy_check_mark: | N/A | +| `password` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `source_type` | [models.ServiceNow](../models/servicenow.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcesftpbulkcopyrawfiles.md b/docs/models/sourcesftpbulkcopyrawfiles.md index 5707b082..6d9c68d8 100644 --- a/docs/models/sourcesftpbulkcopyrawfiles.md +++ b/docs/models/sourcesftpbulkcopyrawfiles.md @@ -5,6 +5,7 @@ Copy raw files without parsing their contents. 
Bits are copied into the destinat ## Fields -| Field | Type | Required | Description | -| ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | -| `delivery_type` | [Optional[models.SourceSftpBulkSchemasDeliveryType]](../models/sourcesftpbulkschemasdeliverytype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `delivery_type` | [Optional[models.SourceSftpBulkSchemasDeliveryType]](../models/sourcesftpbulkschemasdeliverytype.md) | :heavy_minus_sign: | N/A | +| `preserve_directory_structure` | *Optional[bool]* | :heavy_minus_sign: | If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. | \ No newline at end of file diff --git a/docs/models/destinations3glue.md b/docs/models/sourcesharepointenterprise.md similarity index 63% rename from docs/models/destinations3glue.md rename to docs/models/sourcesharepointenterprise.md index 6ec90ff0..d558a1e1 100644 --- a/docs/models/destinations3glue.md +++ b/docs/models/sourcesharepointenterprise.md @@ -1,19 +1,18 @@ -# DestinationS3Glue +# SourceSharepointEnterprise + +SourceMicrosoftSharePointSpec class for Microsoft SharePoint Source Specification. +This class combines the authentication details with additional configuration for the SharePoint API. 
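+By way of example, a hedged sketch of wiring this spec together, assuming the generated constructors accept the field names listed in the tables that follow and that `source_type` is a generated constant with a default:
+
+```python
+from airbyte_api import models
+
+# Placeholder credentials; a real config needs values from your Azure app
+# registration. The `format` keyword is assumed to keep its documented name.
+source = models.SourceSharepointEnterprise(
+    credentials=models.SourceSharepointEnterpriseAuthenticateViaMicrosoftOAuth(
+        client_id="<client-id>",
+        client_secret="<client-secret>",
+        tenant_id="<tenant-id>",
+    ),
+    streams=[
+        models.SourceSharepointEnterpriseFileBasedStreamConfig(
+            name="reports",
+            format=models.SourceSharepointEnterpriseCSVFormat(),
+        ),
+    ],
+)
+```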
## Fields -| Field | Type | Required | Description | Example | -| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `format` | [models.DestinationS3GlueOutputFormat](../models/destinations3glueoutputformat.md) | :heavy_check_mark: | Format of the data output. See here for more details | | -| `glue_database` | *str* | :heavy_check_mark: | Name of the glue database for creating the tables, leave blank if no integration | airbyte_database | -| `s3_bucket_name` | *str* | :heavy_check_mark: | The name of the S3 bucket. Read more here. | airbyte_sync | -| `s3_bucket_path` | *str* | :heavy_check_mark: | Directory under the S3 bucket where data will be written. Read more here | data_sync/test | -| `access_key_id` | *Optional[str]* | :heavy_minus_sign: | The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here. | A012345678910EXAMPLE | -| `destination_type` | [models.S3Glue](../models/s3glue.md) | :heavy_check_mark: | N/A | | -| `file_name_pattern` | *Optional[str]* | :heavy_minus_sign: | The pattern allows you to set the file-name format for the S3 staging file(s) | {date} | -| `glue_serialization_library` | [Optional[models.SerializationLibrary]](../models/serializationlibrary.md) | :heavy_minus_sign: | The library that your query engine will use for reading and writing data in your lake. | | -| `s3_bucket_region` | [Optional[models.DestinationS3GlueS3BucketRegion]](../models/destinations3glues3bucketregion.md) | :heavy_minus_sign: | The region of the S3 bucket. See here for all region codes. | | -| `s3_endpoint` | *Optional[str]* | :heavy_minus_sign: | Your S3 endpoint url. Read more here | http://localhost:9000 | -| `s3_path_format` | *Optional[str]* | :heavy_minus_sign: | Format string on how data will be organized inside the S3 bucket directory. Read more here | ${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_ | -| `secret_access_key` | *Optional[str]* | :heavy_minus_sign: | The corresponding secret to the access key ID. 
Read more here | a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ------------- | ------------- | ------------- | ------------- | ------------- | +| `credentials` | [models.SourceSharepointEnterpriseAuthentication](../models/sourcesharepointenterpriseauthentication.md) | :heavy_check_mark: | Credentials for connecting to the One Drive API | | +| `streams` | List[[models.SourceSharepointEnterpriseFileBasedStreamConfig](../models/sourcesharepointenterprisefilebasedstreamconfig.md)] | :heavy_check_mark: | Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table. | | +| `delivery_method` | [Optional[models.SourceSharepointEnterpriseDeliveryMethod]](../models/sourcesharepointenterprisedeliverymethod.md) | :heavy_minus_sign: | N/A | | +| `folder_path` | *Optional[str]* | :heavy_minus_sign: | Path to a specific folder within the drives to search for files. Leave empty to search all folders of the drives. This does not apply to shared items. | | +| `search_scope` | [Optional[models.SourceSharepointEnterpriseSearchScope]](../models/sourcesharepointenterprisesearchscope.md) | :heavy_minus_sign: | Specifies the location(s) to search for files. Valid options are 'ACCESSIBLE_DRIVES' for all SharePoint drives the user can access, 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to search both. | | +| `site_url` | *Optional[str]* | :heavy_minus_sign: | URL of the SharePoint site to search for files. Leave empty to search in the main site. Use 'https://.sharepoint.com/sites/' to iterate over all sites. 
| | +| `source_type` | [models.SourceSharepointEnterpriseSharepointEnterprise](../models/sourcesharepointenterprisesharepointenterprise.md) | :heavy_check_mark: | N/A | | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated. | 2021-01-01T00:00:00.000000Z | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterpriseauthenticateviamicrosoftoauth.md b/docs/models/sourcesharepointenterpriseauthenticateviamicrosoftoauth.md new file mode 100644 index 00000000..7cd175a3 --- /dev/null +++ b/docs/models/sourcesharepointenterpriseauthenticateviamicrosoftoauth.md @@ -0,0 +1,15 @@ +# SourceSharepointEnterpriseAuthenticateViaMicrosoftOAuth + +OAuthCredentials class to hold authentication details for Microsoft OAuth authentication. +This class uses pydantic for data validation and settings management. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | +| `client_id` | *str* | :heavy_check_mark: | Client ID of your Microsoft developer application | +| `client_secret` | *str* | :heavy_check_mark: | Client Secret of your Microsoft developer application | +| `tenant_id` | *str* | :heavy_check_mark: | Tenant ID of the Microsoft SharePoint user | +| `auth_type` | [Optional[models.SourceSharepointEnterpriseAuthType]](../models/sourcesharepointenterpriseauthtype.md) | :heavy_minus_sign: | N/A | +| `refresh_token` | *Optional[str]* | :heavy_minus_sign: | Refresh Token of your Microsoft developer application | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterpriseauthentication.md b/docs/models/sourcesharepointenterpriseauthentication.md new file mode 100644 index 00000000..04518796 --- /dev/null +++ b/docs/models/sourcesharepointenterpriseauthentication.md @@ -0,0 +1,19 @@ +# SourceSharepointEnterpriseAuthentication + +Credentials for connecting to the One Drive API + + +## Supported Types + +### `models.SourceSharepointEnterpriseAuthenticateViaMicrosoftOAuth` + +```python +value: models.SourceSharepointEnterpriseAuthenticateViaMicrosoftOAuth = /* values here */ +``` + +### `models.SourceSharepointEnterpriseServiceKeyAuthentication` + +```python +value: models.SourceSharepointEnterpriseServiceKeyAuthentication = /* values here */ +``` + diff --git a/docs/models/sourcesharepointenterpriseauthtype.md b/docs/models/sourcesharepointenterpriseauthtype.md new file mode 100644 index 00000000..49110477 --- /dev/null +++ b/docs/models/sourcesharepointenterpriseauthtype.md @@ -0,0 +1,8 @@ +# SourceSharepointEnterpriseAuthType + + +## Values + +| Name | Value | +| -------- | -------- | +| `CLIENT` | Client | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterpriseautogenerated.md b/docs/models/sourcesharepointenterpriseautogenerated.md new file mode 100644 index 00000000..ee429d4a --- /dev/null +++ b/docs/models/sourcesharepointenterpriseautogenerated.md @@ -0,0 +1,8 @@ +# SourceSharepointEnterpriseAutogenerated + + +## Fields + +| Field | Type | Required | 
Description | +| ------------- | ------------- | ------------- | ------------- | +| `header_definition_type` | [Optional[models.SourceSharepointEnterpriseSchemasHeaderDefinitionType]](../models/sourcesharepointenterpriseschemasheaderdefinitiontype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterpriseavroformat.md b/docs/models/sourcesharepointenterpriseavroformat.md new file mode 100644 index 00000000..54404216 --- /dev/null +++ b/docs/models/sourcesharepointenterpriseavroformat.md @@ -0,0 +1,9 @@ +# SourceSharepointEnterpriseAvroFormat + + +## Fields + +| Field | Type | Required | Description | +| ------------- | ------------- | ------------- | ------------- | +| `double_as_string` | *Optional[bool]* | :heavy_minus_sign: | Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers. | +| `filetype` | [Optional[models.SourceSharepointEnterpriseFiletype]](../models/sourcesharepointenterprisefiletype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterprisecopyrawfiles.md b/docs/models/sourcesharepointenterprisecopyrawfiles.md new file mode 100644 index 00000000..ae313513 --- /dev/null +++ b/docs/models/sourcesharepointenterprisecopyrawfiles.md @@ -0,0 +1,11 @@ +# SourceSharepointEnterpriseCopyRawFiles + +Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.
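+For instance, a small sketch (field names per the Fields table below; both are optional):
+
+```python
+from airbyte_api import models
+
+# Keep the source folder hierarchy when copying raw files.
+delivery = models.SourceSharepointEnterpriseCopyRawFiles(
+    preserve_directory_structure=True,
+)
+```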
+ + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `delivery_type` | [Optional[models.SourceSharepointEnterpriseSchemasDeliveryType]](../models/sourcesharepointenterpriseschemasdeliverytype.md) | :heavy_minus_sign: | N/A | +| `preserve_directory_structure` | *Optional[bool]* | :heavy_minus_sign: | If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterprisecsvformat.md b/docs/models/sourcesharepointenterprisecsvformat.md new file mode 100644 index 00000000..9fcf4723 --- /dev/null +++ b/docs/models/sourcesharepointenterprisecsvformat.md @@ -0,0 +1,21 @@ +# SourceSharepointEnterpriseCSVFormat + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
------------- | +| `delimiter` | *Optional[str]* | :heavy_minus_sign: | The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. | +| `double_quote` | *Optional[bool]* | :heavy_minus_sign: | Whether two quotes in a quoted CSV value denote a single quote in the data. | +| `encoding` | *Optional[str]* | :heavy_minus_sign: | The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options. | +| `escape_char` | *Optional[str]* | :heavy_minus_sign: | The character used for escaping special characters. To disallow escaping, leave this field blank. | +| `false_values` | List[*str*] | :heavy_minus_sign: | A set of case-sensitive strings that should be interpreted as false values. | +| `filetype` | [Optional[models.SourceSharepointEnterpriseSchemasFiletype]](../models/sourcesharepointenterpriseschemasfiletype.md) | :heavy_minus_sign: | N/A | +| `header_definition` | [Optional[models.SourceSharepointEnterpriseCSVHeaderDefinition]](../models/sourcesharepointenterprisecsvheaderdefinition.md) | :heavy_minus_sign: | How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided, and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers as `f{i}`, where `i` is the index starting from 0. Otherwise, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. | +| `ignore_errors_on_fields_mismatch` | *Optional[bool]* | :heavy_minus_sign: | Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. | +| `null_values` | List[*str*] | :heavy_minus_sign: | A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field. | +| `quote_char` | *Optional[str]* | :heavy_minus_sign: | The character used for quoting CSV values. To disallow quoting, leave this field blank. | +| `skip_rows_after_header` | *Optional[int]* | :heavy_minus_sign: | The number of rows to skip after the header row. | +| `skip_rows_before_header` | *Optional[int]* | :heavy_minus_sign: | The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. | +| `strings_can_be_null` | *Optional[bool]* | :heavy_minus_sign: | Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. | +| `true_values` | List[*str*] | :heavy_minus_sign: | A set of case-sensitive strings that should be interpreted as true values.
| \ No newline at end of file diff --git a/docs/models/sourcesharepointenterprisecsvheaderdefinition.md b/docs/models/sourcesharepointenterprisecsvheaderdefinition.md new file mode 100644 index 00000000..2500d78f --- /dev/null +++ b/docs/models/sourcesharepointenterprisecsvheaderdefinition.md @@ -0,0 +1,25 @@ +# SourceSharepointEnterpriseCSVHeaderDefinition + +How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided, and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers as `f{i}`, where `i` is the index starting from 0. Otherwise, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. + + +## Supported Types + +### `models.SourceSharepointEnterpriseFromCSV` + +```python +value: models.SourceSharepointEnterpriseFromCSV = /* values here */ +``` + +### `models.SourceSharepointEnterpriseAutogenerated` + +```python +value: models.SourceSharepointEnterpriseAutogenerated = /* values here */ +``` + +### `models.SourceSharepointEnterpriseUserProvided` + +```python +value: models.SourceSharepointEnterpriseUserProvided = /* values here */ +``` + diff --git a/docs/models/sourcesharepointenterprisedeliverymethod.md b/docs/models/sourcesharepointenterprisedeliverymethod.md new file mode 100644 index 00000000..566cd4b4 --- /dev/null +++ b/docs/models/sourcesharepointenterprisedeliverymethod.md @@ -0,0 +1,23 @@ +# SourceSharepointEnterpriseDeliveryMethod + + +## Supported Types + +### `models.SourceSharepointEnterpriseReplicateRecords` + +```python +value: models.SourceSharepointEnterpriseReplicateRecords = /* values here */ +``` + +### `models.SourceSharepointEnterpriseCopyRawFiles` + +```python +value: models.SourceSharepointEnterpriseCopyRawFiles = /* values here */ +``` + +### `models.SourceSharepointEnterpriseReplicatePermissionsACL` + +```python +value: models.SourceSharepointEnterpriseReplicatePermissionsACL = /* values here */ +``` + diff --git a/docs/models/sourcesharepointenterprisedeliverytype.md b/docs/models/sourcesharepointenterprisedeliverytype.md new file mode 100644 index 00000000..56bed1cb --- /dev/null +++ b/docs/models/sourcesharepointenterprisedeliverytype.md @@ -0,0 +1,8 @@ +# SourceSharepointEnterpriseDeliveryType + + +## Values + +| Name | Value | +| ---------------------- | ---------------------- | +| `USE_RECORDS_TRANSFER` | use_records_transfer | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterpriseexcelformat.md b/docs/models/sourcesharepointenterpriseexcelformat.md new file mode 100644 index 00000000..6a0fb4e8 --- /dev/null +++ b/docs/models/sourcesharepointenterpriseexcelformat.md @@ -0,0 +1,8 @@ +# SourceSharepointEnterpriseExcelFormat + + +## Fields + +| Field | Type | Required | Description | +| ------------- | ------------- | ------------- | ------------- | +| `filetype` | 
diff --git a/docs/models/sourcesharepointenterprisefilebasedstreamconfig.md b/docs/models/sourcesharepointenterprisefilebasedstreamconfig.md
new file mode 100644
index 00000000..ea5f9d13
--- /dev/null
+++ b/docs/models/sourcesharepointenterprisefilebasedstreamconfig.md
@@ -0,0 +1,15 @@
+# SourceSharepointEnterpriseFileBasedStreamConfig
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `format` | [models.SourceSharepointEnterpriseFormat](../models/sourcesharepointenterpriseformat.md) | :heavy_check_mark: | The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. |
+| `name` | *str* | :heavy_check_mark: | The name of the stream. |
+| `days_to_sync_if_history_is_full` | *Optional[int]* | :heavy_minus_sign: | When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. |
+| `globs` | List[*str*] | :heavy_minus_sign: | The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here. |
+| `input_schema` | *Optional[str]* | :heavy_minus_sign: | The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files. |
+| `recent_n_files_to_read_for_schema_discovery` | *Optional[int]* | :heavy_minus_sign: | The number of recent files which will be used to discover the schema for this stream. |
+| `schemaless` | *Optional[bool]* | :heavy_minus_sign: | When enabled, syncs will not validate or structure records against the stream's schema. |
+| `validation_policy` | [Optional[models.SourceSharepointEnterpriseValidationPolicy]](../models/sourcesharepointenterprisevalidationpolicy.md) | :heavy_minus_sign: | The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. |
\ No newline at end of file
diff --git a/docs/models/sourcesharepointenterprisefiletype.md b/docs/models/sourcesharepointenterprisefiletype.md
new file mode 100644
index 00000000..a15c3590
--- /dev/null
+++ b/docs/models/sourcesharepointenterprisefiletype.md
@@ -0,0 +1,8 @@
+# SourceSharepointEnterpriseFiletype
+
+
+## Values
+
+| Name | Value |
+| --- | --- |
+| `AVRO` | avro |
\ No newline at end of file
diff --git a/docs/models/sourcesharepointenterpriseformat.md b/docs/models/sourcesharepointenterpriseformat.md
new file mode 100644
index 00000000..641d7763
--- /dev/null
+++ b/docs/models/sourcesharepointenterpriseformat.md
@@ -0,0 +1,43 @@
+# SourceSharepointEnterpriseFormat
+
+The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+
+
+## Supported Types
+
+### `models.SourceSharepointEnterpriseAvroFormat`
+
+```python
+value: models.SourceSharepointEnterpriseAvroFormat = ...  # values here
+```
+
+### `models.SourceSharepointEnterpriseCSVFormat`
+
+```python
+value: models.SourceSharepointEnterpriseCSVFormat = ...  # values here
+```
+
+### `models.SourceSharepointEnterpriseJsonlFormat`
+
+```python
+value: models.SourceSharepointEnterpriseJsonlFormat = ...  # values here
+```
+
+### `models.SourceSharepointEnterpriseParquetFormat`
+
+```python
+value: models.SourceSharepointEnterpriseParquetFormat = ...  # values here
+```
+
+### `models.SourceSharepointEnterpriseUnstructuredDocumentFormat`
+
+```python
+value: models.SourceSharepointEnterpriseUnstructuredDocumentFormat = ...  # values here
+```
+
+### `models.SourceSharepointEnterpriseExcelFormat`
+
+```python
+value: models.SourceSharepointEnterpriseExcelFormat = ...  # values here
+```
+
diff --git a/docs/models/sourcesharepointenterprisefromcsv.md b/docs/models/sourcesharepointenterprisefromcsv.md
new file mode 100644
index 00000000..9c898273
--- /dev/null
+++ b/docs/models/sourcesharepointenterprisefromcsv.md
@@ -0,0 +1,8 @@
+# SourceSharepointEnterpriseFromCSV
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `header_definition_type` | [Optional[models.SourceSharepointEnterpriseHeaderDefinitionType]](../models/sourcesharepointenterpriseheaderdefinitiontype.md) | :heavy_minus_sign: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourcesharepointenterpriseheaderdefinitiontype.md b/docs/models/sourcesharepointenterpriseheaderdefinitiontype.md
new file mode 100644
index 00000000..78f7ed60
--- /dev/null
+++ b/docs/models/sourcesharepointenterpriseheaderdefinitiontype.md
@@ -0,0 +1,8 @@
+# SourceSharepointEnterpriseHeaderDefinitionType
+
+
+## Values
+
+| Name | Value |
+| --- | --- |
+| `FROM_CSV` | From CSV |
\ No newline at end of file
diff --git a/docs/models/sourcesharepointenterprisejsonlformat.md b/docs/models/sourcesharepointenterprisejsonlformat.md
new file mode 100644
index 00000000..bf494011
--- /dev/null
+++ b/docs/models/sourcesharepointenterprisejsonlformat.md
@@ -0,0 +1,8 @@
+# SourceSharepointEnterpriseJsonlFormat
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `filetype` | [Optional[models.SourceSharepointEnterpriseSchemasStreamsFiletype]](../models/sourcesharepointenterpriseschemasstreamsfiletype.md) | :heavy_minus_sign: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourcesharepointenterpriselocal.md b/docs/models/sourcesharepointenterpriselocal.md
new file mode 100644
index 00000000..a284fded
--- /dev/null
+++ b/docs/models/sourcesharepointenterpriselocal.md
@@ -0,0 +1,10 @@
+# SourceSharepointEnterpriseLocal
+
+Process files locally, supporting `fast` and `ocr` modes. This is the default option.
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `mode` | [Optional[models.SourceSharepointEnterpriseMode]](../models/sourcesharepointenterprisemode.md) | :heavy_minus_sign: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourcesharepointenterprisemode.md b/docs/models/sourcesharepointenterprisemode.md
new file mode 100644
index 00000000..6d0a09de
--- /dev/null
+++ b/docs/models/sourcesharepointenterprisemode.md
@@ -0,0 +1,8 @@
+# SourceSharepointEnterpriseMode
+
+
+## Values
+
+| Name | Value |
+| --- | --- |
+| `LOCAL` | local |
\ No newline at end of file
diff --git a/docs/models/sourcesharepointenterpriseparquetformat.md b/docs/models/sourcesharepointenterpriseparquetformat.md
new file mode 100644
index 00000000..dc7e889b
--- /dev/null
+++ b/docs/models/sourcesharepointenterpriseparquetformat.md
@@ -0,0 +1,9 @@
+# SourceSharepointEnterpriseParquetFormat
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `decimal_as_float` | *Optional[bool]* | :heavy_minus_sign: | Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. |
+| `filetype` | [Optional[models.SourceSharepointEnterpriseSchemasStreamsFormatFiletype]](../models/sourcesharepointenterpriseschemasstreamsformatfiletype.md) | :heavy_minus_sign: | N/A |
\ No newline at end of file
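Editor's note, under the same assumptions as the sketch above (constructor keyword arguments inferred from the Fields tables, not verified generated output): a rough illustration of how one of the format classes above plugs into a stream definition.

```python
from airbyte_api import models

# Hypothetical example: a single file-based stream that reads Parquet files
# matching a glob, emitting records even when they fail schema validation.
stream = models.SourceSharepointEnterpriseFileBasedStreamConfig(
    name="invoices",
    format=models.SourceSharepointEnterpriseParquetFormat(decimal_as_float=False),
    globs=["reports/**/*.parquet"],
    days_to_sync_if_history_is_full=3,
    validation_policy=models.SourceSharepointEnterpriseValidationPolicy.EMIT_RECORD,
)
```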
diff --git a/docs/models/sourcesharepointenterpriseparsingstrategy.md b/docs/models/sourcesharepointenterpriseparsingstrategy.md
new file mode 100644
index 00000000..645b6458
--- /dev/null
+++ b/docs/models/sourcesharepointenterpriseparsingstrategy.md
@@ -0,0 +1,13 @@
+# SourceSharepointEnterpriseParsingStrategy
+
+The strategy used to parse documents. `fast` extracts text directly from the document, which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf
+
+
+## Values
+
+| Name | Value |
+| --- | --- |
+| `AUTO` | auto |
+| `FAST` | fast |
+| `OCR_ONLY` | ocr_only |
+| `HI_RES` | hi_res |
\ No newline at end of file
diff --git a/docs/models/sourcesharepointenterpriseprocessing.md b/docs/models/sourcesharepointenterpriseprocessing.md
new file mode 100644
index 00000000..edc4b91e
--- /dev/null
+++ b/docs/models/sourcesharepointenterpriseprocessing.md
@@ -0,0 +1,13 @@
+# SourceSharepointEnterpriseProcessing
+
+Processing configuration
+
+
+## Supported Types
+
+### `models.SourceSharepointEnterpriseLocal`
+
+```python
+value: models.SourceSharepointEnterpriseLocal = ...  # values here
+```
+
diff --git a/docs/models/sourcesharepointenterprisereplicatepermissionsacl.md b/docs/models/sourcesharepointenterprisereplicatepermissionsacl.md
new file mode 100644
index 00000000..854e281b
--- /dev/null
+++ b/docs/models/sourcesharepointenterprisereplicatepermissionsacl.md
@@ -0,0 +1,11 @@
+# SourceSharepointEnterpriseReplicatePermissionsACL
+
+Sends one identity stream and one or more permission (ACL) streams to the destination. This data can be used in downstream systems to recreate permission restrictions mirroring the original source.
+ + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `delivery_type` | [Optional[models.SourceSharepointEnterpriseSchemasDeliveryMethodDeliveryType]](../models/sourcesharepointenterpriseschemasdeliverymethoddeliverytype.md) | :heavy_minus_sign: | N/A | +| `include_identities_stream` | *Optional[bool]* | :heavy_minus_sign: | This data can be used in downstream systems to recreate permission restrictions mirroring the original source | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterprisereplicaterecords.md b/docs/models/sourcesharepointenterprisereplicaterecords.md new file mode 100644 index 00000000..966daa65 --- /dev/null +++ b/docs/models/sourcesharepointenterprisereplicaterecords.md @@ -0,0 +1,10 @@ +# SourceSharepointEnterpriseReplicateRecords + +Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination. + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- | +| `delivery_type` | [Optional[models.SourceSharepointEnterpriseDeliveryType]](../models/sourcesharepointenterprisedeliverytype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterpriseschemasauthtype.md b/docs/models/sourcesharepointenterpriseschemasauthtype.md new file mode 100644 index 00000000..63ccd7d2 --- /dev/null +++ b/docs/models/sourcesharepointenterpriseschemasauthtype.md @@ -0,0 +1,8 @@ +# SourceSharepointEnterpriseSchemasAuthType + + +## Values + +| Name | Value | +| --------- | --------- | +| `SERVICE` | Service | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterpriseschemasdeliverymethoddeliverytype.md b/docs/models/sourcesharepointenterpriseschemasdeliverymethoddeliverytype.md new file mode 100644 index 00000000..8064aa24 --- /dev/null +++ b/docs/models/sourcesharepointenterpriseschemasdeliverymethoddeliverytype.md @@ -0,0 +1,8 @@ +# SourceSharepointEnterpriseSchemasDeliveryMethodDeliveryType + + +## Values + +| Name | Value | +| -------------------------- | -------------------------- | +| `USE_PERMISSIONS_TRANSFER` | use_permissions_transfer | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterpriseschemasdeliverytype.md b/docs/models/sourcesharepointenterpriseschemasdeliverytype.md 
new file mode 100644 index 00000000..b8d2fbe2 --- /dev/null +++ b/docs/models/sourcesharepointenterpriseschemasdeliverytype.md @@ -0,0 +1,8 @@ +# SourceSharepointEnterpriseSchemasDeliveryType + + +## Values + +| Name | Value | +| ------------------- | ------------------- | +| `USE_FILE_TRANSFER` | use_file_transfer | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterpriseschemasfiletype.md b/docs/models/sourcesharepointenterpriseschemasfiletype.md new file mode 100644 index 00000000..48945f15 --- /dev/null +++ b/docs/models/sourcesharepointenterpriseschemasfiletype.md @@ -0,0 +1,8 @@ +# SourceSharepointEnterpriseSchemasFiletype + + +## Values + +| Name | Value | +| ----- | ----- | +| `CSV` | csv | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterpriseschemasheaderdefinitiontype.md b/docs/models/sourcesharepointenterpriseschemasheaderdefinitiontype.md new file mode 100644 index 00000000..f8b093a7 --- /dev/null +++ b/docs/models/sourcesharepointenterpriseschemasheaderdefinitiontype.md @@ -0,0 +1,8 @@ +# SourceSharepointEnterpriseSchemasHeaderDefinitionType + + +## Values + +| Name | Value | +| --------------- | --------------- | +| `AUTOGENERATED` | Autogenerated | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterpriseschemasstreamsfiletype.md b/docs/models/sourcesharepointenterpriseschemasstreamsfiletype.md new file mode 100644 index 00000000..ad38f179 --- /dev/null +++ b/docs/models/sourcesharepointenterpriseschemasstreamsfiletype.md @@ -0,0 +1,8 @@ +# SourceSharepointEnterpriseSchemasStreamsFiletype + + +## Values + +| Name | Value | +| ------- | ------- | +| `JSONL` | jsonl | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterpriseschemasstreamsformatfiletype.md b/docs/models/sourcesharepointenterpriseschemasstreamsformatfiletype.md new file mode 100644 index 00000000..d038ff32 --- /dev/null +++ b/docs/models/sourcesharepointenterpriseschemasstreamsformatfiletype.md @@ -0,0 +1,8 @@ +# SourceSharepointEnterpriseSchemasStreamsFormatFiletype + + +## Values + +| Name | Value | +| --------- | --------- | +| `PARQUET` | parquet | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterpriseschemasstreamsformatformat6filetype.md b/docs/models/sourcesharepointenterpriseschemasstreamsformatformat6filetype.md new file mode 100644 index 00000000..65df95d3 --- /dev/null +++ b/docs/models/sourcesharepointenterpriseschemasstreamsformatformat6filetype.md @@ -0,0 +1,8 @@ +# SourceSharepointEnterpriseSchemasStreamsFormatFormat6Filetype + + +## Values + +| Name | Value | +| ------- | ------- | +| `EXCEL` | excel | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterpriseschemasstreamsformatformatfiletype.md b/docs/models/sourcesharepointenterpriseschemasstreamsformatformatfiletype.md new file mode 100644 index 00000000..eb854583 --- /dev/null +++ b/docs/models/sourcesharepointenterpriseschemasstreamsformatformatfiletype.md @@ -0,0 +1,8 @@ +# SourceSharepointEnterpriseSchemasStreamsFormatFormatFiletype + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `UNSTRUCTURED` | unstructured | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterpriseschemasstreamsheaderdefinitiontype.md b/docs/models/sourcesharepointenterpriseschemasstreamsheaderdefinitiontype.md new file mode 100644 index 00000000..af0a5bbb --- /dev/null +++ b/docs/models/sourcesharepointenterpriseschemasstreamsheaderdefinitiontype.md @@ -0,0 +1,8 @@ +# 
SourceSharepointEnterpriseSchemasStreamsHeaderDefinitionType
+
+
+## Values
+
+| Name | Value |
+| --- | --- |
+| `USER_PROVIDED` | User Provided |
\ No newline at end of file
diff --git a/docs/models/sourcesharepointenterprisesearchscope.md b/docs/models/sourcesharepointenterprisesearchscope.md
new file mode 100644
index 00000000..b647d7e5
--- /dev/null
+++ b/docs/models/sourcesharepointenterprisesearchscope.md
@@ -0,0 +1,12 @@
+# SourceSharepointEnterpriseSearchScope
+
+Specifies the location(s) to search for files. Valid options are 'ACCESSIBLE_DRIVES' for all SharePoint drives the user can access, 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to search both.
+
+
+## Values
+
+| Name | Value |
+| --- | --- |
+| `ACCESSIBLE_DRIVES` | ACCESSIBLE_DRIVES |
+| `SHARED_ITEMS` | SHARED_ITEMS |
+| `ALL` | ALL |
\ No newline at end of file
diff --git a/docs/models/sourcesharepointenterpriseservicekeyauthentication.md b/docs/models/sourcesharepointenterpriseservicekeyauthentication.md
new file mode 100644
index 00000000..0eed5ea5
--- /dev/null
+++ b/docs/models/sourcesharepointenterpriseservicekeyauthentication.md
@@ -0,0 +1,15 @@
+# SourceSharepointEnterpriseServiceKeyAuthentication
+
+ServiceCredentials class for service key authentication.
+This class is structured similarly to OAuthCredentials but for a different authentication method.
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `client_id` | *str* | :heavy_check_mark: | Client ID of your Microsoft developer application |
+| `client_secret` | *str* | :heavy_check_mark: | Client Secret of your Microsoft developer application |
+| `tenant_id` | *str* | :heavy_check_mark: | Tenant ID of the Microsoft SharePoint user |
+| `user_principal_name` | *str* | :heavy_check_mark: | Special characters such as a period, comma, space, and the at sign (@) are converted to underscores (_). More details: https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls |
+| `auth_type` | [Optional[models.SourceSharepointEnterpriseSchemasAuthType]](../models/sourcesharepointenterpriseschemasauthtype.md) | :heavy_minus_sign: | N/A |
\ No newline at end of file
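Editor's sketch of the service-key credentials described above (field names come from the table; the values and constructor behavior are assumptions, not generated output):

```python
from airbyte_api import models

# Hypothetical example: authenticate with an app registration rather than a
# user login. All four values come from your Microsoft developer application.
credentials = models.SourceSharepointEnterpriseServiceKeyAuthentication(
    client_id="<application-client-id>",
    client_secret="<application-client-secret>",
    tenant_id="<azure-tenant-id>",
    user_principal_name="user@example.onmicrosoft.com",
)
```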
diff --git a/docs/models/sourcesharepointenterprisesharepointenterprise.md b/docs/models/sourcesharepointenterprisesharepointenterprise.md
new file mode 100644
index 00000000..0519e17e
--- /dev/null
+++ b/docs/models/sourcesharepointenterprisesharepointenterprise.md
@@ -0,0 +1,8 @@
+# SourceSharepointEnterpriseSharepointEnterprise
+
+
+## Values
+
+| Name | Value |
+| --- | --- |
+| `SHAREPOINT_ENTERPRISE` | sharepoint-enterprise |
\ No newline at end of file
diff --git a/docs/models/sourcesharepointenterpriseunstructureddocumentformat.md b/docs/models/sourcesharepointenterpriseunstructureddocumentformat.md
new file mode 100644
index 00000000..c39c47c6
--- /dev/null
+++ b/docs/models/sourcesharepointenterpriseunstructureddocumentformat.md
@@ -0,0 +1,13 @@
+# SourceSharepointEnterpriseUnstructuredDocumentFormat
+
+Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `filetype` | [Optional[models.SourceSharepointEnterpriseSchemasStreamsFormatFormatFiletype]](../models/sourcesharepointenterpriseschemasstreamsformatformatfiletype.md) | :heavy_minus_sign: | N/A |
+| `processing` | [Optional[models.SourceSharepointEnterpriseProcessing]](../models/sourcesharepointenterpriseprocessing.md) | :heavy_minus_sign: | Processing configuration |
+| `skip_unprocessable_files` | *Optional[bool]* | :heavy_minus_sign: | If true, skip files that cannot be parsed and pass the error message along as the 
_ab_source_file_parse_error field. If false, fail the sync. | +| `strategy` | [Optional[models.SourceSharepointEnterpriseParsingStrategy]](../models/sourcesharepointenterpriseparsingstrategy.md) | :heavy_minus_sign: | The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterpriseuserprovided.md b/docs/models/sourcesharepointenterpriseuserprovided.md new file mode 100644 index 00000000..743e0be0 --- /dev/null +++ b/docs/models/sourcesharepointenterpriseuserprovided.md @@ -0,0 +1,9 @@ +# SourceSharepointEnterpriseUserProvided + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `column_names` | List[*str*] | :heavy_check_mark: | The column names that will be used while emitting the CSV records | +| `header_definition_type` | [Optional[models.SourceSharepointEnterpriseSchemasStreamsHeaderDefinitionType]](../models/sourcesharepointenterpriseschemasstreamsheaderdefinitiontype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterprisevalidationpolicy.md b/docs/models/sourcesharepointenterprisevalidationpolicy.md new file mode 100644 index 00000000..70917d24 --- /dev/null +++ b/docs/models/sourcesharepointenterprisevalidationpolicy.md @@ -0,0 +1,12 @@ +# SourceSharepointEnterpriseValidationPolicy + +The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. 
+ + +## Values + +| Name | Value | +| ------------------- | ------------------- | +| `EMIT_RECORD` | Emit Record | +| `SKIP_RECORD` | Skip Record | +| `WAIT_FOR_DISCOVER` | Wait for Discover | \ No newline at end of file diff --git a/docs/models/sourceshipstation.md b/docs/models/sourceshipstation.md new file mode 100644 index 00000000..4df863a7 --- /dev/null +++ b/docs/models/sourceshipstation.md @@ -0,0 +1,11 @@ +# SourceShipstation + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `username` | *str* | :heavy_check_mark: | N/A | +| `password` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `source_type` | [models.Shipstation](../models/shipstation.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceshopify.md b/docs/models/sourceshopify.md index 95782bf9..90085216 100644 --- a/docs/models/sourceshopify.md +++ b/docs/models/sourceshopify.md @@ -9,7 +9,7 @@ | `bulk_window_in_days` | *Optional[int]* | :heavy_minus_sign: | Defines what would be a date range per single BULK Job | | | `credentials` | [Optional[models.ShopifyAuthorizationMethod]](../models/shopifyauthorizationmethod.md) | :heavy_minus_sign: | The authorization method to use to retrieve data from Shopify | | | `fetch_transactions_user_id` | *Optional[bool]* | :heavy_minus_sign: | Defines which API type (REST/BULK) to use to fetch `Transactions` data. If you are a `Shopify Plus` user, leave the default value to speed up the fetch. | | -| `job_checkpoint_interval` | *Optional[int]* | :heavy_minus_sign: | The threshold, after which the single BULK Job should be checkpointed. | | +| `job_checkpoint_interval` | *Optional[int]* | :heavy_minus_sign: | The threshold, after which the single BULK Job should be checkpointed (min: 15k, max: 1M) | | | `job_product_variants_include_pres_prices` | *Optional[bool]* | :heavy_minus_sign: | If enabled, the `Product Variants` stream attempts to include `Presentment prices` field (may affect the performance). | | | `job_termination_threshold` | *Optional[int]* | :heavy_minus_sign: | The max time in seconds, after which the single BULK Job should be `CANCELED` and retried. The bigger the value the longer the BULK Job is allowed to run. 
| | | `source_type` | [models.SourceShopifyShopify](../models/sourceshopifyshopify.md) | :heavy_check_mark: | N/A | | diff --git a/docs/models/sourceshopwired.md b/docs/models/sourceshopwired.md new file mode 100644 index 00000000..757b97f0 --- /dev/null +++ b/docs/models/sourceshopwired.md @@ -0,0 +1,11 @@ +# SourceShopwired + + +## Fields + +| Field | Type | Required | Description | +| ----------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Your API Key, which acts as the username for Basic Authentication. You can find it in your ShopWired account under API settings. | +| `api_secret` | *str* | :heavy_check_mark: | Your API Secret, which acts as the password for Basic Authentication. You can find it in your ShopWired account under API settings. | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.Shopwired](../models/shopwired.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceshutterstock.md b/docs/models/sourceshutterstock.md new file mode 100644 index 00000000..22d6ec20 --- /dev/null +++ b/docs/models/sourceshutterstock.md @@ -0,0 +1,14 @@ +# SourceShutterstock + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | +| `api_token` | *str* | :heavy_check_mark: | Your OAuth 2.0 token for accessing the Shutterstock API. Obtain this token from your Shutterstock developer account. 
| 
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A |
+| `query_for_audio_search` | *Optional[str]* | :heavy_minus_sign: | The query for audio search |
+| `query_for_catalog_search` | *Optional[str]* | :heavy_minus_sign: | The query for catalog search |
+| `query_for_image_search` | *Optional[str]* | :heavy_minus_sign: | The query for image search |
+| `query_for_video_search` | *Optional[str]* | :heavy_minus_sign: | The query for the `videos_search` stream |
+| `source_type` | [models.Shutterstock](../models/shutterstock.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourcesignnow.md b/docs/models/sourcesignnow.md
new file mode 100644
index 00000000..677d2cb0
--- /dev/null
+++ b/docs/models/sourcesignnow.md
@@ -0,0 +1,12 @@
+# SourceSignnow
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `api_key_id` | *str* | :heavy_check_mark: | Your API key ID, found in the API section after expanding the keys view |
+| `auth_token` | *str* | :heavy_check_mark: | The authorization token required for the `signing_links` stream; it is shown in the expanded key view at `https://app.signnow.com/webapp/api-dashboard/keys` |
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A |
+| `name_filter_for_documents` | List[*Any*] | :heavy_minus_sign: | Name filter for the documents stream |
+| `source_type` | [models.Signnow](../models/signnow.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourceslack.md b/docs/models/sourceslack.md
index 41ac1bf7..ba00a957 100644
--- a/docs/models/sourceslack.md
+++ b/docs/models/sourceslack.md
@@ -3,12 +3,14 @@
 ## Fields
 
-| Field | Type | Required | Description | Example |
-| --- | --- | --- | --- | --- |
-| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. | 2017-01-25T00:00:00Z |
-| `channel_filter` | List[*str*] | :heavy_minus_sign: | A channel name list (without leading '#' char) which limit the channels from which you'd like to sync. Empty list means no filter. | channel_one |
-| `credentials` | [Optional[models.SourceSlackAuthenticationMechanism]](../models/sourceslackauthenticationmechanism.md) | :heavy_minus_sign: | Choose how to authenticate into Slack | |
-| `include_private_channels` | *Optional[bool]* | :heavy_minus_sign: | Whether to read information from private channels that the bot is already in. If false, only public channels will be read. If true, the bot must be manually added to private channels. | |
-| `join_channels` | *Optional[bool]* | :heavy_minus_sign: | Whether to join all channels or to sync data only from channels the bot is already in. If false, you'll need to manually add the bot to all the channels from which you'd like to sync messages. | |
-| `lookback_window` | *Optional[int]* | :heavy_minus_sign: | How far into the past to look for messages in threads, default is 0 days | 7 |
-| `source_type` | [models.SourceSlackSlack](../models/sourceslackslack.md) | :heavy_check_mark: | N/A | |
\ No newline at end of file
+| Field | Type | Required | Description | Example |
+| --- | --- | --- | --- | --- |
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. | 2017-01-25T00:00:00Z |
+| `channel_filter` | List[*str*] | :heavy_minus_sign: | A channel name list (without the leading '#' char) which limits the channels from which you'd like to sync. An empty list means no filter. | channel_one |
+| `channel_messages_window_size` | *Optional[int]* | :heavy_minus_sign: | The size (in days) of the date window that will be used while syncing data from the channel messages stream. A smaller window will allow for greater parallelization when syncing records, but can lead to rate limiting errors. | 30 |
+| `credentials` | [Optional[models.SourceSlackAuthenticationMechanism]](../models/sourceslackauthenticationmechanism.md) | :heavy_minus_sign: | Choose how to authenticate into Slack | |
+| `include_private_channels` | *Optional[bool]* | :heavy_minus_sign: | Whether to read information from private channels that the bot is already in. If false, only public channels will be read. If true, the bot must be manually added to private channels. | |
+| `join_channels` | *Optional[bool]* | :heavy_minus_sign: | Whether to join all channels or to sync data only from channels the bot is already in. If false, you'll need to manually add the bot to all the channels from which you'd like to sync messages. | |
+| `lookback_window` | *Optional[int]* | :heavy_minus_sign: | How far into the past to look for messages in threads, default is 0 days | 7 |
+| `num_workers` | *Optional[int]* | :heavy_minus_sign: | The number of worker threads to use for the sync. | 2 |
+| `source_type` | [models.SourceSlackSlack](../models/sourceslackslack.md) | :heavy_check_mark: | N/A | |
\ No newline at end of file
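Editor's sketch of the two Slack options added above (constructor keyword arguments assumed from the Fields table; not generated output):

```python
from datetime import date

from airbyte_api import models

# Hypothetical example: a smaller channel_messages_window_size increases
# parallelism per sync window but risks rate limiting; num_workers controls
# how many worker threads the sync uses.
slack = models.SourceSlack(
    start_date=date(2017, 1, 25),
    channel_filter=["channel_one"],
    channel_messages_window_size=30,
    num_workers=2,
    lookback_window=7,
)
```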
diff --git a/docs/models/sourcesmartsheets.md b/docs/models/sourcesmartsheets.md
index e28e9f0f..2b99b60f 100644
--- a/docs/models/sourcesmartsheets.md
+++ b/docs/models/sourcesmartsheets.md
@@ -3,9 +3,10 @@
 ## Fields
 
-| Field | Type | Required | Description |
-| --- | --- | --- | --- |
-| `credentials` | [models.SourceSmartsheetsAuthorizationMethod](../models/sourcesmartsheetsauthorizationmethod.md) | :heavy_check_mark: | N/A |
-| `spreadsheet_id` | *str* | :heavy_check_mark: | The spreadsheet ID. Find it by opening the spreadsheet then navigating to File > Properties |
-| `metadata_fields` | List[[models.Validenums](../models/validenums.md)] | :heavy_minus_sign: | A List of available columns which metadata can be pulled from. |
-| `source_type` | [models.SourceSmartsheetsSmartsheets](../models/sourcesmartsheetssmartsheets.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `credentials` | [models.SourceSmartsheetsAuthorizationMethod](../models/sourcesmartsheetsauthorizationmethod.md) | :heavy_check_mark: | N/A |
+| `spreadsheet_id` | *str* | :heavy_check_mark: | The spreadsheet ID. Find it by opening the spreadsheet then navigating to File > Properties |
+| `is_report` | *Optional[bool]* | :heavy_minus_sign: | If true, the source will treat the provided sheet_id as a report. If false, the source will treat the provided sheet_id as a sheet. |
+| `metadata_fields` | List[[models.Validenums](../models/validenums.md)] | :heavy_minus_sign: | A list of available columns from which metadata can be pulled. |
+| `source_type` | [models.SourceSmartsheetsSmartsheets](../models/sourcesmartsheetssmartsheets.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
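Editor's sketch of the new `is_report` flag (the credentials value is elided here because it is one of the `SourceSmartsheetsAuthorizationMethod` variants documented on its own page; everything else is assumed from the table above):

```python
from airbyte_api import models

# Hypothetical example: treat the provided ID as a Smartsheet report.
smartsheets = models.SourceSmartsheets(
    credentials=...,  # one of the SourceSmartsheetsAuthorizationMethod variants
    spreadsheet_id="<report-id>",
    is_report=True,
)
```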
diff --git a/docs/models/sourcesnapchatmarketing.md b/docs/models/sourcesnapchatmarketing.md
index 460c7374..928e54d3 100644
--- a/docs/models/sourcesnapchatmarketing.md
+++ b/docs/models/sourcesnapchatmarketing.md
@@ -9,7 +9,9 @@
 | `client_secret` | *str* | :heavy_check_mark: | The Client Secret of your Snapchat developer application. | |
 | `refresh_token` | *str* | :heavy_check_mark: | Refresh Token to renew the expired Access Token. | |
 | `action_report_time` | [Optional[models.ActionReportTime]](../models/actionreporttime.md) | :heavy_minus_sign: | Specifies the principle for conversion reporting. | |
+| `ad_account_ids` | List[*Any*] | :heavy_minus_sign: | Ad Account IDs of the ad accounts to retrieve | |
 | `end_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_minus_sign: | Date in the format 2017-01-25. Any data after this date will not be replicated. | 2022-01-30 |
+| `organization_ids` | List[*Any*] | :heavy_minus_sign: | The IDs of the organizations to retrieve | |
 | `source_type` | [models.SourceSnapchatMarketingSnapchatMarketing](../models/sourcesnapchatmarketingsnapchatmarketing.md) | :heavy_check_mark: | N/A | |
 | `start_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_minus_sign: | Date in the format 2022-01-01. Any data before this date will not be replicated. | 2022-01-01 |
 | `swipe_up_attribution_window` | [Optional[models.SwipeUpAttributionWindow]](../models/swipeupattributionwindow.md) | :heavy_minus_sign: | Attribution window for swipe ups. | |
diff --git a/docs/models/sourcesnowflake.md b/docs/models/sourcesnowflake.md
index c1d8479b..554ff6c1 100644
--- a/docs/models/sourcesnowflake.md
+++ b/docs/models/sourcesnowflake.md
@@ -3,13 +3,17 @@
 ## Fields
 
-| Field | Type | Required | Description | Example |
-| --- | --- | --- | --- | --- |
-| `database` | *str* | :heavy_check_mark: | The database you created for Airbyte to access data. | AIRBYTE_DATABASE |
-| `host` | *str* | :heavy_check_mark: | The host domain of the snowflake instance (must include the account, region, cloud environment, and end with snowflakecomputing.com). | accountname.us-east-2.aws.snowflakecomputing.com |
-| `role` | *str* | :heavy_check_mark: | The role you created for Airbyte to access Snowflake. | AIRBYTE_ROLE |
-| `warehouse` | *str* | :heavy_check_mark: | The warehouse you created for Airbyte to access data. | AIRBYTE_WAREHOUSE |
-| `credentials` | [Optional[models.SourceSnowflakeAuthorizationMethod]](../models/sourcesnowflakeauthorizationmethod.md) | :heavy_minus_sign: | N/A | |
-| `jdbc_url_params` | *Optional[str]* | :heavy_minus_sign: | Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). | |
-| `schema` | *Optional[str]* | :heavy_minus_sign: | The source Snowflake schema tables. Leave empty to access tables from multiple schemas. | AIRBYTE_SCHEMA |
-| `source_type` | [models.SourceSnowflakeSnowflake](../models/sourcesnowflakesnowflake.md) | :heavy_check_mark: | N/A | |
\ No newline at end of file
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `database` | *str* | :heavy_check_mark: | The database you created for Airbyte to access data. |
+| `host` | *str* | :heavy_check_mark: | The host domain of the snowflake instance (must include the account, region, cloud environment, and end with snowflakecomputing.com). |
+| `role` | *str* | :heavy_check_mark: | The role you created for Airbyte to access Snowflake. |
+| `warehouse` | *str* | :heavy_check_mark: | The warehouse you created for Airbyte to access data. |
+| `check_privileges` | *Optional[bool]* | :heavy_minus_sign: | When this feature is enabled, during schema discovery the connector will query each table or view individually to check access privileges and inaccessible tables, views, or columns therein will be removed. In large schemas, this might cause schema discovery to take too long, in which case it might be advisable to disable this feature. |
+| `checkpoint_target_interval_seconds` | *Optional[int]* | :heavy_minus_sign: | How often (in seconds) a stream should checkpoint, when possible. |
+| `concurrency` | *Optional[int]* | :heavy_minus_sign: | Maximum number of concurrent queries to the database. |
+| `credentials` | [Optional[models.SourceSnowflakeAuthorizationMethod]](../models/sourcesnowflakeauthorizationmethod.md) | :heavy_minus_sign: | N/A |
+| `cursor` | [Optional[models.SourceSnowflakeUpdateMethod]](../models/sourcesnowflakeupdatemethod.md) | :heavy_minus_sign: | Configures how data is extracted from the database. |
+| `jdbc_url_params` | *Optional[str]* | :heavy_minus_sign: | Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). |
+| `schema` | *Optional[str]* | :heavy_minus_sign: | The source Snowflake schema tables. Leave empty to access tables from multiple schemas. |
+| `source_type` | [models.SourceSnowflakeSnowflake](../models/sourcesnowflakesnowflake.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourcesnowflakeauthorizationmethod.md b/docs/models/sourcesnowflakeauthorizationmethod.md
index 852cfa63..3e79b2c7 100644
--- a/docs/models/sourcesnowflakeauthorizationmethod.md
+++ b/docs/models/sourcesnowflakeauthorizationmethod.md
@@ -3,12 +3,6 @@
 ## Supported Types
 
-### `models.SourceSnowflakeOAuth20`
-
-```python
-value: models.SourceSnowflakeOAuth20 = /* values here */
-```
-
 ### `models.SourceSnowflakeKeyPairAuthentication`
 
 ```python
diff --git a/docs/models/sourcesnowflakeauthtype.md b/docs/models/sourcesnowflakeauthtype.md
index 0371d1af..780b6e9c 100644
--- a/docs/models/sourcesnowflakeauthtype.md
+++ b/docs/models/sourcesnowflakeauthtype.md
@@ -3,6 +3,6 @@
 ## Values
 
-| Name | Value |
-| --- | --- |
-| `O_AUTH` | OAuth |
\ No newline at end of file
+| Name | Value |
+| --- | --- |
+| `KEY_PAIR_AUTHENTICATION` | Key Pair Authentication |
\ No newline at end of file
diff --git a/docs/models/sourcesnowflakecursormethod.md b/docs/models/sourcesnowflakecursormethod.md
new file mode 100644
index 00000000..4b47021a
--- /dev/null
+++ b/docs/models/sourcesnowflakecursormethod.md
@@ -0,0 +1,8 @@
+# SourceSnowflakeCursorMethod
+
+
+## Values
+
+| Name | Value |
+| --- | --- |
+| `USER_DEFINED` | user_defined |
\ No newline at end of file
diff --git a/docs/models/sourcesnowflakekeypairauthentication.md b/docs/models/sourcesnowflakekeypairauthentication.md
index 5863dc2a..f97969b9 100644
--- a/docs/models/sourcesnowflakekeypairauthentication.md
+++ b/docs/models/sourcesnowflakekeypairauthentication.md
@@ -3,9 +3,10 @@
 ## Fields
 
-| Field | Type | Required | Description | Example |
-| --- | --- | --- | --- | --- |
-| `private_key` | *str* | :heavy_check_mark: | RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key. | |
-| `username` | *str* | :heavy_check_mark: | The username you created to allow Airbyte to access the database. | AIRBYTE_USER |
-| `auth_type` | [Optional[models.SourceSnowflakeSchemasAuthType]](../models/sourcesnowflakeschemasauthtype.md) | :heavy_minus_sign: | N/A | |
-| `private_key_password` | *Optional[str]* | :heavy_minus_sign: | Passphrase for private key | |
\ No newline at end of file
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `private_key` | *str* | :heavy_check_mark: | RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key. |
+| `username` | *str* | :heavy_check_mark: | The username you created to allow Airbyte to access the database. |
+| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A |
+| `auth_type` | [Optional[models.SourceSnowflakeAuthType]](../models/sourcesnowflakeauthtype.md) | :heavy_minus_sign: | N/A |
+| `private_key_password` | *Optional[str]* | :heavy_minus_sign: | Passphrase for private key |
\ No newline at end of file
diff --git a/docs/models/sourcesnowflakeoauth20.md b/docs/models/sourcesnowflakeoauth20.md
deleted file mode 100644
index b4776531..00000000
--- a/docs/models/sourcesnowflakeoauth20.md
+++ /dev/null
@@ -1,12 +0,0 @@
-# SourceSnowflakeOAuth20
-
-
-## Fields
-
-| Field | Type | Required | Description |
-| --- | --- | --- | --- |
-| `client_id` | *str* | :heavy_check_mark: | The Client ID of your Snowflake developer application. |
-| `client_secret` | *str* | :heavy_check_mark: | The Client Secret of your Snowflake developer application. |
-| `access_token` | *Optional[str]* | :heavy_minus_sign: | Access Token for making authenticated requests. |
-| `auth_type` | [models.SourceSnowflakeAuthType](../models/sourcesnowflakeauthtype.md) | :heavy_check_mark: | N/A |
-| `refresh_token` | *Optional[str]* | :heavy_minus_sign: | Refresh Token for making authenticated requests. |
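Editor's sketch: with the OAuth 2.0 option removed above, key-pair authentication becomes the primary Snowflake method. The shape below is assumed from the tables in this diff (placeholder values; `cursor` uses the update-method union documented next), not verified generated output.

```python
from airbyte_api import models

# Hypothetical example: Snowflake source using key-pair auth and the
# user-defined-cursor update method.
snowflake = models.SourceSnowflake(
    host="accountname.us-east-2.aws.snowflakecomputing.com",
    database="AIRBYTE_DATABASE",
    role="AIRBYTE_ROLE",
    warehouse="AIRBYTE_WAREHOUSE",
    credentials=models.SourceSnowflakeKeyPairAuthentication(
        username="AIRBYTE_USER",
        private_key="-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----",
    ),
    cursor=models.SourceSnowflakeScanChangesWithUserDefinedCursor(),
)
```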
\ No newline at end of file
diff --git a/docs/models/sourcesnowflakescanchangeswithuserdefinedcursor.md b/docs/models/sourcesnowflakescanchangeswithuserdefinedcursor.md
new file mode 100644
index 00000000..7a3c7859
--- /dev/null
+++ b/docs/models/sourcesnowflakescanchangeswithuserdefinedcursor.md
@@ -0,0 +1,11 @@
+# SourceSnowflakeScanChangesWithUserDefinedCursor
+
+Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| --- | --- | --- | --- |
+| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A |
+| `cursor_method` | [Optional[models.SourceSnowflakeCursorMethod]](../models/sourcesnowflakecursormethod.md) | :heavy_minus_sign: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourcesnowflakeschemasauthtype.md b/docs/models/sourcesnowflakeschemasauthtype.md
index 65f28e75..518e93bd 100644
--- a/docs/models/sourcesnowflakeschemasauthtype.md
+++ b/docs/models/sourcesnowflakeschemasauthtype.md
@@ -3,6 +3,6 @@
 ## Values
 
-| Name | Value |
-| --- | --- |
-| `KEY_PAIR_AUTHENTICATION` | Key Pair Authentication |
\ No newline at end of file
+| Name | Value |
+| --- | --- |
+| `USERNAME_PASSWORD` | username/password |
\ No newline at end of file
diff --git a/docs/models/sourcesnowflakeupdatemethod.md b/docs/models/sourcesnowflakeupdatemethod.md
new file mode 100644
index 00000000..a215e29d
--- /dev/null
+++ b/docs/models/sourcesnowflakeupdatemethod.md
@@ -0,0 +1,13 @@
+# SourceSnowflakeUpdateMethod
+
+Configures how data is extracted from the database.
+
+
+## Supported Types
+
+### `models.SourceSnowflakeScanChangesWithUserDefinedCursor`
+
+```python
+value: models.SourceSnowflakeScanChangesWithUserDefinedCursor = ...  # values here
+```
+
diff --git a/docs/models/sourcesnowflakeusernameandpassword.md b/docs/models/sourcesnowflakeusernameandpassword.md
index 6d3db2ab..219d9969 100644
--- a/docs/models/sourcesnowflakeusernameandpassword.md
+++ b/docs/models/sourcesnowflakeusernameandpassword.md
@@ -3,8 +3,9 @@
 ## Fields
 
-| Field | Type | Required | Description | Example |
-| --- | --- | --- | --- | --- |
-| `password` | *str* | :heavy_check_mark: | The password associated with the username. | |
-| `username` | *str* | :heavy_check_mark: | The username you created to allow Airbyte to access the database. 
| AIRBYTE_USER | -| `auth_type` | [models.SourceSnowflakeSchemasCredentialsAuthType](../models/sourcesnowflakeschemascredentialsauthtype.md) | :heavy_check_mark: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | +| `password` | *str* | :heavy_check_mark: | The password associated with the username. | +| `username` | *str* | :heavy_check_mark: | The username you created to allow Airbyte to access the database. | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `auth_type` | [Optional[models.SourceSnowflakeSchemasAuthType]](../models/sourcesnowflakeschemasauthtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcespotifyads.md b/docs/models/sourcespotifyads.md new file mode 100644 index 00000000..ad550994 --- /dev/null +++ b/docs/models/sourcespotifyads.md @@ -0,0 +1,14 @@ +# SourceSpotifyAds + + +## Fields + +| Field | Type | Required | Description | Example | +| -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | +| `ad_account_id` | *str* | :heavy_check_mark: | The ID of the Spotify Ad Account you want to sync data from. | 03561a07-cb0a-4354-b751-88512a6f4d79 | +| `client_id` | *str* | :heavy_check_mark: | The Client ID of your Spotify Developer application. | | +| `client_secret` | *str* | :heavy_check_mark: | The Client Secret of your Spotify Developer application. | | +| `fields` | List[[models.Fields](../models/fields.md)] | :heavy_check_mark: | List of fields to include in the campaign performance report. Choose from available metrics. | [
"IMPRESSIONS",
"CLICKS",
"SPEND",
"CTR"
] | +| `refresh_token` | *str* | :heavy_check_mark: | The Refresh Token obtained from the initial OAuth 2.0 authorization flow. | | +| `start_date` | *str* | :heavy_check_mark: | The date to start syncing data from, in YYYY-MM-DD format. | 2024-01-01 | +| `source_type` | [models.SpotifyAds](../models/spotifyads.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourcesvix.md b/docs/models/sourcesvix.md new file mode 100644 index 00000000..c73d7d06 --- /dev/null +++ b/docs/models/sourcesvix.md @@ -0,0 +1,10 @@ +# SourceSvix + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | API key or access token | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.Svix](../models/svix.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcetavus.md b/docs/models/sourcetavus.md new file mode 100644 index 00000000..d0616cec --- /dev/null +++ b/docs/models/sourcetavus.md @@ -0,0 +1,10 @@ +# SourceTavus + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Your Tavus API key. You can find this in your Tavus account settings or API dashboard. 
| +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.Tavus](../models/tavus.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcethinkificcourses.md b/docs/models/sourcethinkificcourses.md new file mode 100644 index 00000000..ab40ff34 --- /dev/null +++ b/docs/models/sourcethinkificcourses.md @@ -0,0 +1,10 @@ +# SourceThinkificCourses + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | +| `x_auth_subdomain` | *str* | :heavy_check_mark: | N/A | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `source_type` | [models.ThinkificCourses](../models/thinkificcourses.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcethrivelearning.md b/docs/models/sourcethrivelearning.md new file mode 100644 index 00000000..c7912d69 --- /dev/null +++ b/docs/models/sourcethrivelearning.md @@ -0,0 +1,11 @@ +# SourceThriveLearning + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `username` | *str* | :heavy_check_mark: | Your website Tenant ID (e.g. eu-west-000000; please contact support for your tenant) | +| `password` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `source_type` | [models.ThriveLearning](../models/thrivelearning.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcetimely.md b/docs/models/sourcetimely.md index bc175851..856b338c 100644 --- a/docs/models/sourcetimely.md +++ b/docs/models/sourcetimely.md @@ -3,9 +3,9 @@ ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------ | ------------------------------------ | ------------------------------------ | ------------------------------------ | ------------------------------------ | -| `account_id` | *str* | :heavy_check_mark: | Timely account id | | -| `bearer_token` | *str* | :heavy_check_mark: | Timely bearer token | | -| `start_date` | *str* | :heavy_check_mark: | start date | 2022-05-06 | -| `source_type` | [models.Timely](../models/timely.md) | :heavy_check_mark: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `account_id` | *str* | :heavy_check_mark: | The Account ID for your Timely account | +| `bearer_token` | *str* | :heavy_check_mark: | The Bearer Token for your Timely account | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | Earliest date from which you want to pull data. 
| +| `source_type` | [models.Timely](../models/timely.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcetmdb.md b/docs/models/sourcetmdb.md new file mode 100644 index 00000000..ae425fd2 --- /dev/null +++ b/docs/models/sourcetmdb.md @@ -0,0 +1,12 @@ +# SourceTmdb + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | +| `api_key` | *str* | :heavy_check_mark: | API Key from your TMDB account | | +| `language` | *str* | :heavy_check_mark: | Language expressed in ISO 639-1 scheme. Mandatory for required streams (example: en-US) | en-US | +| `movie_id` | *str* | :heavy_check_mark: | Target movie ID. Mandatory for movie streams (example: 550) | 550 | +| `query` | *str* | :heavy_check_mark: | Search query. Mandatory for search streams | Marvel | +| `source_type` | [models.Tmdb](../models/tmdb.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourcetoggl.md b/docs/models/sourcetoggl.md new file mode 100644 index 00000000..f9db3be8 --- /dev/null +++ b/docs/models/sourcetoggl.md @@ -0,0 +1,13 @@ +# SourceToggl + + +## Fields + +| Field | Type | Required | Description | Example | +| --------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------- | +| `api_token` | *str* | :heavy_check_mark: | Your API Token. See here. The token is case sensitive. | | +| `end_date` | *str* | :heavy_check_mark: | To retrieve time entries created before the given date (inclusive). | YYYY-MM-DD | +| `organization_id` | *int* | :heavy_check_mark: | Your organization id. See here. | | +| `start_date` | *str* | :heavy_check_mark: | To retrieve time entries created after the given date (inclusive). | YYYY-MM-DD | +| `workspace_id` | *int* | :heavy_check_mark: | Your workspace id. See here. 
| | +| `source_type` | [models.Toggl](../models/toggl.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourcetwelvedata.md b/docs/models/sourcetwelvedata.md index 5bd27b39..0a752079 100644 --- a/docs/models/sourcetwelvedata.md +++ b/docs/models/sourcetwelvedata.md @@ -8,6 +8,6 @@ | `api_key` | *str* | :heavy_check_mark: | N/A | | `country` | *Optional[str]* | :heavy_minus_sign: | Where instrument is traded | | `exchange` | *Optional[str]* | :heavy_minus_sign: | Where instrument is traded | -| `interval` | [Optional[models.Interval]](../models/interval.md) | :heavy_minus_sign: | Between two consecutive points in time series Supports: 1min, 5min, 15min, 30min, 45min, 1h, 2h, 4h, 1day, 1week, 1month | +| `interval` | [Optional[models.SourceTwelveDataInterval]](../models/sourcetwelvedatainterval.md) | :heavy_minus_sign: | The interval between two consecutive points in the time series. Supports: 1min, 5min, 15min, 30min, 45min, 1h, 2h, 4h, 1day, 1week, 1month | | `source_type` | [models.TwelveData](../models/twelvedata.md) | :heavy_check_mark: | N/A | | `symbol` | *Optional[str]* | :heavy_minus_sign: | Ticker of the instrument | \ No newline at end of file diff --git a/docs/models/sourcetwelvedatainterval.md b/docs/models/sourcetwelvedatainterval.md new file mode 100644 index 00000000..61952ba3 --- /dev/null +++ b/docs/models/sourcetwelvedatainterval.md @@ -0,0 +1,20 @@ +# SourceTwelveDataInterval + +The interval between two consecutive points in the time series. Supports: 1min, 5min, 15min, 30min, 45min, 1h, 2h, 4h, 1day, 1week, 1month + + +## Values + +| Name | Value | +| --------------- | --------------- | +| `ONEMIN` | 1min | +| `FIVEMIN` | 5min | +| `FIFTEENMIN` | 15min | +| `THIRTYMIN` | 30min | +| `FORTY_FIVEMIN` | 45min | +| `ONEH` | 1h | +| `TWOH` | 2h | +| `FOURH` | 4h | +| `ONEDAY` | 1day | +| `ONEWEEK` | 1week | +| `ONEMONTH` | 1month | \ No newline at end of file diff --git a/docs/models/sourcetyntecsms.md b/docs/models/sourcetyntecsms.md new file mode 100644 index 00000000..3304a713 --- /dev/null +++ b/docs/models/sourcetyntecsms.md @@ -0,0 +1,12 @@ +# SourceTyntecSms + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Your Tyntec API Key. See here | +| `from_` | *str* | :heavy_check_mark: | The phone number of the SMS message sender (international). | +| `to` | *str* | :heavy_check_mark: | The phone number of the SMS message recipient (international). | +| `message` | *Optional[str]* | :heavy_minus_sign: | The content of the SMS message to be sent. 
| +| `source_type` | [models.TyntecSms](../models/tyntecsms.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceuptick.md b/docs/models/sourceuptick.md new file mode 100644 index 00000000..ca7eae1e --- /dev/null +++ b/docs/models/sourceuptick.md @@ -0,0 +1,16 @@ +# SourceUptick + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------- | +| `base_url` | *str* | :heavy_check_mark: | Ex: https://demo-fire.onuptick.com/ | +| `client_id` | *str* | :heavy_check_mark: | N/A | +| `client_refresh_token` | *str* | :heavy_check_mark: | N/A | +| `client_secret` | *str* | :heavy_check_mark: | N/A | +| `end_date` | *Optional[str]* | :heavy_minus_sign: | Fetch data up until this date | +| `oauth_access_token` | *Optional[str]* | :heavy_minus_sign: | The current access token. This field might be overridden by the connector based on the token refresh endpoint response. | +| `oauth_token_expiry_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date on which the current access token expires. This field might be overridden by the connector based on the token refresh endpoint response. | +| `source_type` | [models.Uptick](../models/uptick.md) | :heavy_check_mark: | N/A | +| `start_date` | *Optional[str]* | :heavy_minus_sign: | Fetch data starting from this date (by default 2025-01-01) | \ No newline at end of file diff --git a/docs/models/sourcevitally.md b/docs/models/sourcevitally.md index 185496b7..cde59e16 100644 --- a/docs/models/sourcevitally.md +++ b/docs/models/sourcevitally.md @@ -3,8 +3,10 @@ ## Fields -| Field | Type | Required | Description | -| ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | -| `api_key` | *str* | :heavy_check_mark: | The API Token for a Vitally account. | -| `status` | [models.SourceVitallyStatus](../models/sourcevitallystatus.md) | :heavy_check_mark: | Status of the Vitally accounts. One of the following values; active, churned, activeOrChurned. 
| -| `source_type` | [models.Vitally](../models/vitally.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | +| `domain` | *str* | :heavy_check_mark: | Provide only the subdomain part, like https://{your-custom-subdomain}.rest.vitally.io/. Keep empty if you don't have a subdomain. | +| `secret_token` | *str* | :heavy_check_mark: | sk_live_secret_token | +| `status` | [models.SourceVitallyStatus](../models/sourcevitallystatus.md) | :heavy_check_mark: | Status of the Vitally accounts. One of the following values; active, churned, activeOrChurned. | +| `basic_auth_header` | *Optional[str]* | :heavy_minus_sign: | Basic Auth Header | +| `source_type` | [models.Vitally](../models/vitally.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/hadoopcatalogusehierarchicalfilesystemsassameasstorageconfig.md b/docs/models/sourcewatchmode.md similarity index 57% rename from docs/models/hadoopcatalogusehierarchicalfilesystemsassameasstorageconfig.md rename to docs/models/sourcewatchmode.md index 342868a8..130b22e2 100644 --- a/docs/models/hadoopcatalogusehierarchicalfilesystemsassameasstorageconfig.md +++ b/docs/models/sourcewatchmode.md @@ -1,11 +1,11 @@ -# HadoopCatalogUseHierarchicalFileSystemsAsSameAsStorageConfig - -A Hadoop catalog doesn’t need to connect to a Hive MetaStore, but can only be used with HDFS or similar file systems that support atomic rename. +# SourceWatchmode ## Fields -| Field | Type | Required | Description | Example | -| ----------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | -| `catalog_type` | [Optional[models.DestinationIcebergCatalogType]](../models/destinationicebergcatalogtype.md) | :heavy_minus_sign: | N/A | | -| `database` | *Optional[str]* | :heavy_minus_sign: | The default database tables are written to if the source does not specify a namespace. The usual value for this field is "default". 
| default | \ No newline at end of file +| Field | Type | Required | Description | +| ----------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Your API key for authenticating with the Watchmode API. You can request a free API key at https://api.watchmode.com/requestApiKey/. | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `search_val` | *Optional[str]* | :heavy_minus_sign: | The name value for the search stream | +| `source_type` | [models.Watchmode](../models/watchmode.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceworkday.md b/docs/models/sourceworkday.md new file mode 100644 index 00000000..d4e594d5 --- /dev/null +++ b/docs/models/sourceworkday.md @@ -0,0 +1,11 @@ +# SourceWorkday + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `credentials` | [models.SourceWorkdayAuthentication](../models/sourceworkdayauthentication.md) | :heavy_check_mark: | Report Based Streams and REST API Streams use different methods of authentication. Choose the type of streams you want to sync and provide the credentials needed for them. | +| `host` | *str* | :heavy_check_mark: | N/A | +| `tenant_id` | *str* | :heavy_check_mark: | N/A | +| `source_type` | [models.Workday](../models/workday.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceworkdayauthentication.md b/docs/models/sourceworkdayauthentication.md new file mode 100644 index 00000000..106a3f48 --- /dev/null +++ b/docs/models/sourceworkdayauthentication.md @@ -0,0 +1,19 @@ +# SourceWorkdayAuthentication + +Report Based Streams and REST API Streams use different methods of authentication. Choose the type of streams you want to sync and provide the credentials needed for them. 
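Since `credentials` takes exactly one branch of the `SourceWorkdayAuthentication` union documented under Supported Types below, here is a minimal sketch of wiring it up through the Python SDK. Only the `SourceWorkday` fields shown above are confirmed by this change; the keyword arguments passed to `models.ReportBasedStreams`, the example host, and the assumption that the SDK fills in the const `source_type` are all illustrative.

```python
from airbyte_api import models

# Hypothetical credential fields; check docs/models/reportbasedstreams.md for
# the real constructor arguments of this union branch.
credentials = models.ReportBasedStreams(
    username="AIRBYTE_USER",  # assumed field name
    password="**********",    # assumed field name
)

# credentials, host and tenant_id are the fields documented above; the const
# source_type is left to the SDK default.
source = models.SourceWorkday(
    credentials=credentials,
    host="wd2-impl-services1.workday.com",  # placeholder host
    tenant_id="acme_corp",
)
```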
+ + +## Supported Types + +### `models.ReportBasedStreams` + +```python +value: models.ReportBasedStreams = /* values here */ +``` + +### `models.RESTAPIStreams` + +```python +value: models.RESTAPIStreams = /* values here */ +``` + diff --git a/docs/models/sourceyounium.md b/docs/models/sourceyounium.md new file mode 100644 index 00000000..d426439c --- /dev/null +++ b/docs/models/sourceyounium.md @@ -0,0 +1,12 @@ +# SourceYounium + + +## Fields + +| Field | Type | Required | Description | +| ----------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------- | +| `legal_entity` | *str* | :heavy_check_mark: | Legal Entity that data should be pulled from | +| `password` | *str* | :heavy_check_mark: | Account password for younium account API key | +| `username` | *str* | :heavy_check_mark: | Username for Younium account | +| `playground` | *Optional[bool]* | :heavy_minus_sign: | Property defining if connector is used against playground or production environment | +| `source_type` | [models.Younium](../models/younium.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceyousign.md b/docs/models/sourceyousign.md new file mode 100644 index 00000000..546b40b6 --- /dev/null +++ b/docs/models/sourceyousign.md @@ -0,0 +1,12 @@ +# SourceYousign + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | +| `api_key` | *str* | :heavy_check_mark: | API key or access token | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `limit` | *Optional[str]* | :heavy_minus_sign: | Limit for each response objects | +| `source_type` | [models.Yousign](../models/yousign.md) | :heavy_check_mark: | N/A | +| `subdomain` | [Optional[models.SourceYousignSubdomain]](../models/sourceyousignsubdomain.md) | :heavy_minus_sign: | The subdomain for the Yousign API environment, such as 'sandbox' or 'api'. | \ No newline at end of file diff --git a/docs/models/sourceyousignsubdomain.md b/docs/models/sourceyousignsubdomain.md new file mode 100644 index 00000000..dd5baaab --- /dev/null +++ b/docs/models/sourceyousignsubdomain.md @@ -0,0 +1,11 @@ +# SourceYousignSubdomain + +The subdomain for the Yousign API environment, such as 'sandbox' or 'api'. 
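As a quick illustration of the enum below, a hedged sketch of pointing the Yousign source at the sandbox environment; the API key is a placeholder and the const `source_type` is assumed to default inside the SDK:

```python
from datetime import date

from airbyte_api import models

# API_SANDBOX resolves to the 'api-sandbox' value listed below; API selects
# the production 'api' environment.
source = models.SourceYousign(
    api_key="<your-yousign-api-key>",  # placeholder credential
    start_date=date(2024, 1, 1),
    subdomain=models.SourceYousignSubdomain.API_SANDBOX,
)
```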
+ + +## Values + +| Name | Value | +| ------------- | ------------- | +| `API_SANDBOX` | api-sandbox | +| `API` | api | \ No newline at end of file diff --git a/docs/models/sourcezapsign.md b/docs/models/sourcezapsign.md new file mode 100644 index 00000000..160e9264 --- /dev/null +++ b/docs/models/sourcezapsign.md @@ -0,0 +1,11 @@ +# SourceZapsign + + +## Fields + +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `api_token` | *str* | :heavy_check_mark: | Your static API token for authentication. You can find it in your ZapSign account under the 'Settings' or 'API' section. For more details, refer to the [Getting Started](https://docs.zapsign.com.br/english/getting-started#how-do-i-get-my-api-token) guide. | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `signer_ids` | List[*Any*] | :heavy_minus_sign: | The signer ids for signer stream | +| `source_type` | [models.Zapsign](../models/zapsign.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcezendeskchat.md b/docs/models/sourcezendeskchat.md index 9b7e58e0..f51b13d6 100644 --- a/docs/models/sourcezendeskchat.md +++ b/docs/models/sourcezendeskchat.md @@ -3,9 +3,9 @@ ## Fields -| Field | Type | Required | Description | Example | -| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | 
:heavy_check_mark: | The date from which you'd like to replicate data for Zendesk Chat API, in the format YYYY-MM-DDT00:00:00Z. | 2021-02-01T00:00:00Z | -| `credentials` | [Optional[models.SourceZendeskChatAuthorizationMethod]](../models/sourcezendeskchatauthorizationmethod.md) | :heavy_minus_sign: | N/A | | -| `source_type` | [models.SourceZendeskChatZendeskChat](../models/sourcezendeskchatzendeskchat.md) | :heavy_check_mark: | N/A | | -| `subdomain` | *Optional[str]* | :heavy_minus_sign: | The unique subdomain of your Zendesk account (without https://). See the Zendesk docs to find your subdomain | myzendeskchat | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | The date from which you'd like to replicate data for Zendesk Chat API, in the format YYYY-MM-DDT00:00:00Z. | 2021-02-01T00:00:00Z | +| `subdomain` | *str* | :heavy_check_mark: | The unique subdomain of your Zendesk account (without https://). See the Zendesk docs to find your subdomain. 
| myzendeskchat | +| `credentials` | [Optional[models.SourceZendeskChatAuthorizationMethod]](../models/sourcezendeskchatauthorizationmethod.md) | :heavy_minus_sign: | N/A | | +| `source_type` | [models.ZendeskChat](../models/zendeskchat.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/spotifyads.md b/docs/models/spotifyads.md new file mode 100644 index 00000000..d6530766 --- /dev/null +++ b/docs/models/spotifyads.md @@ -0,0 +1,8 @@ +# SpotifyAds + + +## Values + +| Name | Value | +| ------------- | ------------- | +| `SPOTIFY_ADS` | spotify-ads | \ No newline at end of file diff --git a/docs/models/sshkeyauthentication.md b/docs/models/sshkeyauthentication.md index c865955b..7cadf79e 100644 --- a/docs/models/sshkeyauthentication.md +++ b/docs/models/sshkeyauthentication.md @@ -1,12 +1,15 @@ # SSHKeyAuthentication +Connect through a jump server tunnel host using username and ssh key + ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | -| `ssh_key` | *str* | :heavy_check_mark: | OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa ) | | -| `tunnel_host` | *str* | :heavy_check_mark: | Hostname of the jump server host that allows inbound ssh tunnel. | | -| `tunnel_user` | *str* | :heavy_check_mark: | OS-level username for logging into the jump server host. | | -| `tunnel_method` | [models.DestinationClickhouseTunnelMethod](../models/destinationclickhousetunnelmethod.md) | :heavy_check_mark: | Connect through a jump server tunnel host using username and ssh key | | -| `tunnel_port` | *Optional[int]* | :heavy_minus_sign: | Port on the proxy/jump server that accepts inbound ssh connections. | 22 | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | +| `ssh_key` | *str* | :heavy_check_mark: | OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa ) | +| `tunnel_host` | *str* | :heavy_check_mark: | Hostname of the jump server host that allows inbound ssh tunnel. 
| +| `tunnel_user` | *str* | :heavy_check_mark: | OS-level username for logging into the jump server host | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `tunnel_method` | [Optional[models.DestinationClickhouseTunnelMethod]](../models/destinationclickhousetunnelmethod.md) | :heavy_minus_sign: | N/A | +| `tunnel_port` | *Optional[int]* | :heavy_minus_sign: | Port on the proxy/jump server that accepts inbound ssh connections. | \ No newline at end of file diff --git a/docs/models/statefilterenum.md b/docs/models/statefilterenum.md deleted file mode 100644 index c2e15753..00000000 --- a/docs/models/statefilterenum.md +++ /dev/null @@ -1,12 +0,0 @@ -# StateFilterEnum - -An enumeration. - - -## Values - -| Name | Value | -| ---------- | ---------- | -| `ENABLED` | enabled | -| `PAUSED` | paused | -| `ARCHIVED` | archived | \ No newline at end of file diff --git a/docs/models/storageconfig.md b/docs/models/storageconfig.md deleted file mode 100644 index 37c1386f..00000000 --- a/docs/models/storageconfig.md +++ /dev/null @@ -1,19 +0,0 @@ -# StorageConfig - -Storage config of Iceberg. - - -## Supported Types - -### `models.DestinationIcebergS3` - -```python -value: models.DestinationIcebergS3 = /* values here */ -``` - -### `models.ServerManaged` - -```python -value: models.ServerManaged = /* values here */ -``` - diff --git a/docs/models/storagetype.md b/docs/models/storagetype.md index 47e50aa7..434ce0df 100644 --- a/docs/models/storagetype.md +++ b/docs/models/storagetype.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ----- | ----- | -| `S3` | S3 | \ No newline at end of file +| Name | Value | +| ------ | ------ | +| `NONE` | None | \ No newline at end of file diff --git a/docs/models/streamconfiguration.md b/docs/models/streamconfiguration.md index 6f95d0f7..9d86f749 100644 --- a/docs/models/streamconfiguration.md +++ b/docs/models/streamconfiguration.md @@ -9,7 +9,10 @@ Configurations for a single stream. | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `name` | *str* | :heavy_check_mark: | N/A | | `cursor_field` | List[*str*] | :heavy_minus_sign: | Path to the field that will be used to determine if a record is new or modified since the last sync. This field is REQUIRED if `sync_mode` is `incremental` unless there is a default. | +| `destination_object_name` | *Optional[str]* | :heavy_minus_sign: | The name of the destination object that this stream will be written to, used for data activation destinations. | +| `include_files` | *Optional[bool]* | :heavy_minus_sign: | Whether to move raw files from the source to the destination during the sync. | | `mappers` | List[[models.ConfiguredStreamMapper](../models/configuredstreammapper.md)] | :heavy_minus_sign: | Mappers that should be applied to the stream before writing to the destination. 
| +| `namespace` | *Optional[str]* | :heavy_minus_sign: | Namespace of the stream. | | `primary_key` | List[List[*str*]] | :heavy_minus_sign: | Paths to the fields that will be used as primary key. This field is REQUIRED if `destination_sync_mode` is `*_dedup` unless it is already supplied by the source schema. | | `selected_fields` | List[[models.SelectedFieldInfo](../models/selectedfieldinfo.md)] | :heavy_minus_sign: | Paths to the fields that will be included in the configured catalog. | | `sync_mode` | [Optional[models.ConnectionSyncModeEnum]](../models/connectionsyncmodeenum.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/streamconfigurationsinput.md b/docs/models/streamconfigurationsinput.md new file mode 100644 index 00000000..71308eeb --- /dev/null +++ b/docs/models/streamconfigurationsinput.md @@ -0,0 +1,10 @@ +# StreamConfigurationsInput + +A list of configured stream options for a connection. + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `streams` | List[[models.StreamConfiguration](../models/streamconfiguration.md)] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/streamnameoverrides.md b/docs/models/streamnameoverrides.md new file mode 100644 index 00000000..8153b98e --- /dev/null +++ b/docs/models/streamnameoverrides.md @@ -0,0 +1,9 @@ +# StreamNameOverrides + + +## Fields + +| Field | Type | Required | Description | +| --------------------------------------------------------------------------- | --------------------------------------------------------------------------- | --------------------------------------------------------------------------- | --------------------------------------------------------------------------- | +| `custom_stream_name` | *str* | :heavy_check_mark: | The name you want this stream to appear as in Airbyte and your destination. | +| `source_stream_name` | *str* | :heavy_check_mark: | The exact name of the sheet/tab in your Google Spreadsheet. | \ No newline at end of file diff --git a/docs/models/streamproperties.md b/docs/models/streamproperties.md index d10968ed..54bc3fec 100644 --- a/docs/models/streamproperties.md +++ b/docs/models/streamproperties.md @@ -12,4 +12,5 @@ The stream properties associated with a connection. | `source_defined_cursor_field` | *Optional[bool]* | :heavy_minus_sign: | N/A | | `source_defined_primary_key` | List[List[*str*]] | :heavy_minus_sign: | N/A | | `stream_name` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `streamnamespace` | *Optional[str]* | :heavy_minus_sign: | N/A | | `sync_modes` | List[[models.ConnectionSyncModeEnum](../models/connectionsyncmodeenum.md)] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/subdomain.md b/docs/models/subdomain.md new file mode 100644 index 00000000..8fb95a38 --- /dev/null +++ b/docs/models/subdomain.md @@ -0,0 +1,11 @@ +# Subdomain + +The subdomain for the Nexio API environment, such as 'nexiopaysandbox' or 'nexiopay'. 
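A one-line sketch of selecting the Nexio sandbox environment through this enum; the member names come straight from the Values table below:

```python
from airbyte_api import models

# Resolves to the string "nexiopaysandbox"; use Subdomain.NEXIOPAY for production.
subdomain = models.Subdomain.NEXIOPAYSANDBOX
```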
+ + +## Values + +| Name | Value | +| ----------------- | ----------------- | +| `NEXIOPAYSANDBOX` | nexiopaysandbox | +| `NEXIOPAY` | nexiopay | \ No newline at end of file diff --git a/docs/models/subtitleformat.md b/docs/models/subtitleformat.md new file mode 100644 index 00000000..2a8ace35 --- /dev/null +++ b/docs/models/subtitleformat.md @@ -0,0 +1,11 @@ +# SubtitleFormat + +The subtitle format for transcript_subtitle stream + + +## Values + +| Name | Value | +| ----- | ----- | +| `VTT` | vtt | +| `SRT` | srt | \ No newline at end of file diff --git a/docs/models/surrealdb.md b/docs/models/surrealdb.md new file mode 100644 index 00000000..2f35751c --- /dev/null +++ b/docs/models/surrealdb.md @@ -0,0 +1,8 @@ +# Surrealdb + + +## Values + +| Name | Value | +| ----------- | ----------- | +| `SURREALDB` | surrealdb | \ No newline at end of file diff --git a/docs/models/svix.md b/docs/models/svix.md new file mode 100644 index 00000000..3404892a --- /dev/null +++ b/docs/models/svix.md @@ -0,0 +1,8 @@ +# Svix + + +## Values + +| Name | Value | +| ------ | ------ | +| `SVIX` | svix | \ No newline at end of file diff --git a/docs/models/tag.md b/docs/models/tag.md new file mode 100644 index 00000000..c7cb9af2 --- /dev/null +++ b/docs/models/tag.md @@ -0,0 +1,13 @@ +# Tag + +A tag that can be associated with a connection. Useful for grouping and organizing connections in a workspace. + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `color` | *str* | :heavy_check_mark: | N/A | +| `name` | *str* | :heavy_check_mark: | N/A | +| `tag_id` | *str* | :heavy_check_mark: | N/A | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/tagcreaterequest.md b/docs/models/tagcreaterequest.md new file mode 100644 index 00000000..b26e2114 --- /dev/null +++ b/docs/models/tagcreaterequest.md @@ -0,0 +1,10 @@ +# TagCreateRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `color` | *str* | :heavy_check_mark: | N/A | +| `name` | *str* | :heavy_check_mark: | N/A | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/tagpatchrequest.md b/docs/models/tagpatchrequest.md new file mode 100644 index 00000000..463e1469 --- /dev/null +++ b/docs/models/tagpatchrequest.md @@ -0,0 +1,9 @@ +# TagPatchRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `color` | *str* | :heavy_check_mark: | N/A | +| `name` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/tagresponse.md b/docs/models/tagresponse.md new file mode 100644 index 00000000..a2685ee7 --- /dev/null +++ b/docs/models/tagresponse.md @@ -0,0 +1,13 @@ +# TagResponse + +Provides details of a single tag. 
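Pairing the `TagCreateRequest` model above with the new tag endpoints, a sketch of creating a tag through the SDK. The `airbyte_api.AirbyteAPI` entry point and `models.Security` scheme follow the SDK's usual pattern, but the `tags.create_tag` operation name is an assumption, and the token and workspace ID are placeholders:

```python
import airbyte_api
from airbyte_api import models

s = airbyte_api.AirbyteAPI(
    security=models.Security(bearer_auth="<redacted-token>"),  # placeholder token
)

# Hypothetical operation name; on success the response should carry a
# TagResponse with the color / name / tag_id / workspace_id fields below.
res = s.tags.create_tag(request=models.TagCreateRequest(
    color="#FF5733",  # a hexadecimal color value
    name="production",
    workspace_id="00000000-0000-0000-0000-000000000000",  # placeholder UUID
))
```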
+ + +## Fields + +| Field | Type | Required | Description | +| ------------------------- | ------------------------- | ------------------------- | ------------------------- | +| `color` | *str* | :heavy_check_mark: | A hexadecimal color value | +| `name` | *str* | :heavy_check_mark: | N/A | +| `tag_id` | *str* | :heavy_check_mark: | N/A | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/tagsresponse.md b/docs/models/tagsresponse.md new file mode 100644 index 00000000..8865e0cb --- /dev/null +++ b/docs/models/tagsresponse.md @@ -0,0 +1,8 @@ +# TagsResponse + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------- | ---------------------------------------------------- | ---------------------------------------------------- | ---------------------------------------------------- | +| `data` | List[[models.TagResponse](../models/tagresponse.md)] | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/tavus.md b/docs/models/tavus.md new file mode 100644 index 00000000..c2678ca4 --- /dev/null +++ b/docs/models/tavus.md @@ -0,0 +1,8 @@ +# Tavus + + +## Values + +| Name | Value | +| ------- | ------- | +| `TAVUS` | tavus | \ No newline at end of file diff --git a/docs/models/td2.md b/docs/models/td2.md new file mode 100644 index 00000000..e8cf8981 --- /dev/null +++ b/docs/models/td2.md @@ -0,0 +1,10 @@ +# Td2 + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | +| `password` | *str* | :heavy_check_mark: | Enter the password associated with the username. | +| `username` | *str* | :heavy_check_mark: | Username to use to access the database. 
| +| `auth_type` | [Optional[models.DestinationTeradataAuthType]](../models/destinationteradataauthtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/thetargetedactionresourceforthefetch.md b/docs/models/thetargetedactionresourceforthefetch.md new file mode 100644 index 00000000..407a3b80 --- /dev/null +++ b/docs/models/thetargetedactionresourceforthefetch.md @@ -0,0 +1,11 @@ +# TheTargetedActionResourceForTheFetch + +Note: different targets have different attribute enum requirements; please refer to the Actions sections in https://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/Welcome.html + + +## Values + +| Name | Value | +| ---------------------- | ---------------------- | +| `GET_QUEUE_ATTRIBUTES` | GetQueueAttributes | +| `RECEIVE_MESSAGE` | ReceiveMessage | \ No newline at end of file diff --git a/docs/models/thinkificcourses.md b/docs/models/thinkificcourses.md new file mode 100644 index 00000000..63adf1a7 --- /dev/null +++ b/docs/models/thinkificcourses.md @@ -0,0 +1,8 @@ +# ThinkificCourses + + +## Values + +| Name | Value | +| ------------------- | ------------------- | +| `THINKIFIC_COURSES` | thinkific-courses | \ No newline at end of file diff --git a/docs/models/thrivelearning.md b/docs/models/thrivelearning.md new file mode 100644 index 00000000..7b074314 --- /dev/null +++ b/docs/models/thrivelearning.md @@ -0,0 +1,8 @@ +# ThriveLearning + + +## Values + +| Name | Value | +| ----------------- | ----------------- | +| `THRIVE_LEARNING` | thrive-learning | \ No newline at end of file diff --git a/docs/models/timezone.md b/docs/models/timezone.md new file mode 100644 index 00000000..aa2159e3 --- /dev/null +++ b/docs/models/timezone.md @@ -0,0 +1,11 @@ +# TimeZone + +The timezone for the reporting data. Use 'ORTZ' for Organization Time Zone or 'UTC' for Coordinated Universal Time. Default is UTC. + + +## Values + +| Name | Value | +| ------ | ------ | +| `ORTZ` | ORTZ | +| `UTC` | UTC | \ No newline at end of file diff --git a/docs/models/tmdb.md b/docs/models/tmdb.md new file mode 100644 index 00000000..92df3d79 --- /dev/null +++ b/docs/models/tmdb.md @@ -0,0 +1,8 @@ +# Tmdb + + +## Values + +| Name | Value | +| ------ | ------ | +| `TMDB` | tmdb | \ No newline at end of file diff --git a/docs/models/toggl.md b/docs/models/toggl.md new file mode 100644 index 00000000..08c1a341 --- /dev/null +++ b/docs/models/toggl.md @@ -0,0 +1,8 @@ +# Toggl + + +## Values + +| Name | Value | +| ------- | ------- | +| `TOGGL` | toggl | \ No newline at end of file diff --git a/docs/models/tokenbasedauthentication.md b/docs/models/tokenbasedauthentication.md new file mode 100644 index 00000000..789accca --- /dev/null +++ b/docs/models/tokenbasedauthentication.md @@ -0,0 +1,15 @@ +# TokenBasedAuthentication + +Authenticate using a token-based authentication method. This requires a consumer key and secret, as well as a token ID and secret. 
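A minimal sketch of assembling this option; the four required fields match the table below, and the literal values are placeholders for secrets generated in NetSuite:

```python
from airbyte_api import models

# All four values come from the NetSuite integration record and token-based
# role described above; keep the token secret secure.
auth = models.TokenBasedAuthentication(
    client_id="<consumer-key>",
    client_secret="<consumer-secret>",
    token_id="<token-id>",
    token_secret="<token-secret>",
)
```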
+ + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `client_id` | *str* | :heavy_check_mark: | The consumer key used for token-based authentication. This is generated in NetSuite when creating an integration record. | +| `client_secret` | *str* | :heavy_check_mark: | The consumer secret used for token-based authentication. This is generated in NetSuite when creating an integration record. | +| `token_id` | *str* | :heavy_check_mark: | The token ID used for token-based authentication. This is generated in NetSuite when creating a token-based role. | +| `token_secret` | *str* | :heavy_check_mark: | The token secret used for token-based authentication. This is generated in NetSuite when creating a token-based role. Be sure to keep this value secure. | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `authentication_method` | [Optional[models.SourceNetsuiteEnterpriseSchemasAuthenticationMethod]](../models/sourcenetsuiteenterpriseschemasauthenticationmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/transformationqueryruntype.md b/docs/models/transformationqueryruntype.md deleted file mode 100644 index b6c7d228..00000000 --- a/docs/models/transformationqueryruntype.md +++ /dev/null @@ -1,11 +0,0 @@ -# TransformationQueryRunType - -Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default "interactive" value is used if not set explicitly. 
- - -## Values - -| Name | Value | -| ------------- | ------------- | -| `INTERACTIVE` | interactive | -| `BATCH` | batch | \ No newline at end of file diff --git a/docs/models/tunnelmethod.md b/docs/models/tunnelmethod.md index 57efeb60..65519238 100644 --- a/docs/models/tunnelmethod.md +++ b/docs/models/tunnelmethod.md @@ -1,7 +1,5 @@ # TunnelMethod -No ssh tunnel needed to connect to database - ## Values diff --git a/docs/models/tyntecsms.md b/docs/models/tyntecsms.md new file mode 100644 index 00000000..27dfcc17 --- /dev/null +++ b/docs/models/tyntecsms.md @@ -0,0 +1,8 @@ +# TyntecSms + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `TYNTEC_SMS` | tyntec-sms | \ No newline at end of file diff --git a/docs/models/type.md b/docs/models/type.md new file mode 100644 index 00000000..8623fb10 --- /dev/null +++ b/docs/models/type.md @@ -0,0 +1,8 @@ +# Type + + +## Values + +| Name | Value | +| -------- | -------- | +| `O_AUTH` | OAuth | \ No newline at end of file diff --git a/docs/models/unencrypted.md b/docs/models/unencrypted.md index 5790f31b..b44facdd 100644 --- a/docs/models/unencrypted.md +++ b/docs/models/unencrypted.md @@ -5,6 +5,7 @@ The data transfer will not be encrypted. ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | -| `ssl_method` | [Optional[models.DestinationMssqlSslMethod]](../models/destinationmssqlsslmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------ | ------------------------------------------ | ------------------------------------------ | ------------------------------------------ | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `name` | [Optional[models.Name]](../models/name.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/unstructureddocumentformat.md b/docs/models/unstructureddocumentformat.md index b4cd93de..37591f6d 100644 --- a/docs/models/unstructureddocumentformat.md +++ b/docs/models/unstructureddocumentformat.md @@ -7,7 +7,7 @@ Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one rec | Field | Type | Required | Description | | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `filetype` | [Optional[models.SourceGcsSchemasStreamsFormatFormatFiletype]](../models/sourcegcsschemasstreamsformatformatfiletype.md) | :heavy_minus_sign: | N/A | -| `processing` | [Optional[models.SourceGcsProcessing]](../models/sourcegcsprocessing.md) | :heavy_minus_sign: | Processing configuration | +| `filetype` | [Optional[models.SourceAzureBlobStorageSchemasStreamsFormatFiletype]](../models/sourceazureblobstorageschemasstreamsformatfiletype.md) | :heavy_minus_sign: | N/A | +| `processing` | [Optional[models.Processing]](../models/processing.md) | :heavy_minus_sign: | Processing configuration | | `skip_unprocessable_files` | *Optional[bool]* | :heavy_minus_sign: | If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. | -| `strategy` | [Optional[models.SourceGcsParsingStrategy]](../models/sourcegcsparsingstrategy.md) | :heavy_minus_sign: | The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf | \ No newline at end of file +| `strategy` | [Optional[models.ParsingStrategy]](../models/parsingstrategy.md) | :heavy_minus_sign: | The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. 
See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf | \ No newline at end of file diff --git a/docs/models/updatedeclarativesourcedefinitionrequest.md b/docs/models/updatedeclarativesourcedefinitionrequest.md new file mode 100644 index 00000000..6bd06819 --- /dev/null +++ b/docs/models/updatedeclarativesourcedefinitionrequest.md @@ -0,0 +1,8 @@ +# UpdateDeclarativeSourceDefinitionRequest + + +## Fields + +| Field | Type | Required | Description | +| --------------------------------- | --------------------------------- | --------------------------------- | --------------------------------- | +| `manifest` | *Any* | :heavy_check_mark: | Low code CDK manifest JSON object | \ No newline at end of file diff --git a/docs/models/updatedefinitionrequest.md b/docs/models/updatedefinitionrequest.md new file mode 100644 index 00000000..86034944 --- /dev/null +++ b/docs/models/updatedefinitionrequest.md @@ -0,0 +1,9 @@ +# UpdateDefinitionRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `docker_image_tag` | *str* | :heavy_check_mark: | N/A | +| `name` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/uptick.md b/docs/models/uptick.md new file mode 100644 index 00000000..fb02fc14 --- /dev/null +++ b/docs/models/uptick.md @@ -0,0 +1,8 @@ +# Uptick + + +## Values + +| Name | Value | +| -------- | -------- | +| `UPTICK` | uptick | \ No newline at end of file diff --git a/docs/models/validactionbreakdowns.md b/docs/models/validactionbreakdowns.md index f51cd943..9a93fda7 100644 --- a/docs/models/validactionbreakdowns.md +++ b/docs/models/validactionbreakdowns.md @@ -17,4 +17,8 @@ An enumeration. | `ACTION_TYPE` | action_type | | `ACTION_VIDEO_SOUND` | action_video_sound | | `ACTION_VIDEO_TYPE` | action_video_type | +| `CONVERSION_DESTINATION` | conversion_destination | +| `MATCHED_PERSONA_ID` | matched_persona_id | +| `MATCHED_PERSONA_NAME` | matched_persona_name | +| `SIGNAL_SOURCE_BUCKET` | signal_source_bucket | | `STANDARD_EVENT_CONTENT_TYPE` | standard_event_content_type | \ No newline at end of file diff --git a/docs/models/validbreakdowns.md b/docs/models/validbreakdowns.md index afc8fd59..654d6ac7 100644 --- a/docs/models/validbreakdowns.md +++ b/docs/models/validbreakdowns.md @@ -11,8 +11,10 @@ An enumeration. | `AGE` | age | | `APP_ID` | app_id | | `BODY_ASSET` | body_asset | +| `BREAKDOWN_REPORTING_AD_ID` | breakdown_reporting_ad_id | | `CALL_TO_ACTION_ASSET` | call_to_action_asset | | `COARSE_CONVERSION_VALUE` | coarse_conversion_value | +| `CONVERSION_DESTINATION` | conversion_destination | | `COUNTRY` | country | | `DESCRIPTION_ASSET` | description_asset | | `DEVICE_PLATFORM` | device_platform | @@ -26,6 +28,7 @@ An enumeration. | `IMAGE_ASSET` | image_asset | | `IMPRESSION_DEVICE` | impression_device | | `IS_CONVERSION_ID_MODELED` | is_conversion_id_modeled | +| `IS_RENDERED_AS_DELAYED_SKIP_AD` | is_rendered_as_delayed_skip_ad | | `LANDING_DESTINATION` | landing_destination | | `LINK_URL_ASSET` | link_url_asset | | `MARKETING_MESSAGES_BTN_NAME` | marketing_messages_btn_name | @@ -36,6 +39,7 @@ An enumeration. 
 | `MEDIA_FORMAT` | media_format |
 | `MEDIA_ORIGIN_URL` | media_origin_url |
 | `MEDIA_TEXT_CONTENT` | media_text_content |
+| `MEDIA_TYPE` | media_type |
 | `MMM` | mmm |
 | `PLACE_PAGE_ID` | place_page_id |
 | `PLATFORM_POSITION` | platform_position |
@@ -44,9 +48,17 @@
 | `PUBLISHER_PLATFORM` | publisher_platform |
 | `REDOWNLOAD` | redownload |
 | `REGION` | region |
+| `SIGNAL_SOURCE_BUCKET` | signal_source_bucket |
 | `SKAN_CAMPAIGN_ID` | skan_campaign_id |
 | `SKAN_CONVERSION_ID` | skan_conversion_id |
 | `SKAN_VERSION` | skan_version |
+| `SOT_ATTRIBUTION_MODEL_TYPE` | sot_attribution_model_type |
+| `SOT_ATTRIBUTION_WINDOW` | sot_attribution_window |
+| `SOT_CHANNEL` | sot_channel |
+| `SOT_EVENT_TYPE` | sot_event_type |
+| `SOT_SOURCE` | sot_source |
 | `STANDARD_EVENT_CONTENT_TYPE` | standard_event_content_type |
 | `TITLE_ASSET` | title_asset |
+| `USER_PERSONA_ID` | user_persona_id |
+| `USER_PERSONA_NAME` | user_persona_name |
 | `VIDEO_ASSET` | video_asset |
\ No newline at end of file
diff --git a/docs/models/verifyca.md b/docs/models/verifyca.md
index 42724e7d..a5fa485a 100644
--- a/docs/models/verifyca.md
+++ b/docs/models/verifyca.md
@@ -5,8 +5,8 @@ Verify-ca SSL mode.
 
 ## Fields
 
-| Field | Type | Required | Description |
-| ------------------------ | ------------------------ | ------------------------ | ------------------------ |
-| `ca_certificate` | *str* | :heavy_check_mark: | CA certificate |
-| `client_key_password` | *Optional[str]* | :heavy_minus_sign: | Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically. |
-| `mode` | [Optional[models.DestinationPostgresSchemasSSLModeSSLModesMode]](../models/destinationpostgresschemassslmodesslmodesmode.md) | :heavy_minus_sign: | N/A |
\ No newline at end of file
+| Field | Type | Required | Description |
+| ------------------------ | ------------------------ | ------------------------ | ------------------------ |
+| `ca_certificate` | *str* | :heavy_check_mark: | CA certificate |
+| `client_key_password` | *Optional[str]* | :heavy_minus_sign: | Password for keystorage. This field is optional. If you do not add it, the password will be generated automatically. |
+| `mode` | [Optional[models.DestinationPostgresSchemasSSLModeSSLModes5Mode]](../models/destinationpostgresschemassslmodesslmodes5mode.md) | :heavy_minus_sign: | N/A |
\ No newline at end of file
diff --git a/docs/models/verifyidentity.md b/docs/models/verifyidentity.md
index 0bc95a00..066f2793 100644
--- a/docs/models/verifyidentity.md
+++ b/docs/models/verifyidentity.md
@@ -1,14 +1,15 @@
 # VerifyIdentity
 
-Always connect with SSL. Verify both CA and Hostname.
+To always require encryption and verify that the source has a valid SSL certificate.
 
 ## Fields
 
-| Field | Type | Required | Description |
-| ------------------------ | ------------------------ | ------------------------ | ------------------------ |
-| `ca_certificate` | *str* | :heavy_check_mark: | CA certificate |
-| `client_certificate` | *Optional[str]* | :heavy_minus_sign: | Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well) |
-| `client_key` | *Optional[str]* | :heavy_minus_sign: | Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well) |
-| `client_key_password` | *Optional[str]* | :heavy_minus_sign: | Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically. |
-| `mode` | [models.SourceMysqlSchemasSSLModeSSLModesMode](../models/sourcemysqlschemassslmodesslmodesmode.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
+| Field | Type | Required | Description |
+| ------------------------ | ------------------------ | ------------------------ | ------------------------ |
+| `ca_certificate` | *str* | :heavy_check_mark: | CA certificate |
+| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A |
+| `client_certificate` | *Optional[str]* | :heavy_minus_sign: | Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well) |
+| `client_key` | *Optional[str]* | :heavy_minus_sign: | Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well) |
+| `client_key_password` | *Optional[str]* | :heavy_minus_sign: | Password for keystorage. This field is optional. If you do not add it, the password will be generated automatically. |
+| `mode` | [Optional[models.SourceMysqlSchemasSslModeEncryptionMode]](../models/sourcemysqlschemassslmodeencryptionmode.md) | :heavy_minus_sign: | N/A |
\ No newline at end of file
diff --git a/docs/models/watchmode.md b/docs/models/watchmode.md
new file mode 100644
index 00000000..66559eb7
--- /dev/null
+++ b/docs/models/watchmode.md
@@ -0,0 +1,8 @@
+# Watchmode
+
+
+## Values
+
+| Name | Value |
+| ----------- | ----------- |
+| `WATCHMODE` | watchmode |
\ No newline at end of file
diff --git a/docs/models/webhooknotificationconfig.md b/docs/models/webhooknotificationconfig.md
new file mode 100644
index 00000000..85a599e1
--- /dev/null
+++ b/docs/models/webhooknotificationconfig.md
@@ -0,0 +1,11 @@
+# WebhookNotificationConfig
+
+Configures a webhook notification.
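+
+A minimal usage sketch with hypothetical values; it assumes `WebhookNotificationConfig` is exported from `airbyte_api.models` like the other generated models, and the URL is illustrative:
+
+```python
+from airbyte_api import models
+
+# Enable webhook notifications and point them at an endpoint.
+webhook_config = models.WebhookNotificationConfig(
+    enabled=True,
+    url='https://example.com/airbyte-notifications',
+)
+```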
+ + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `enabled` | *Optional[bool]* | :heavy_minus_sign: | N/A | +| `url` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/workday.md b/docs/models/workday.md new file mode 100644 index 00000000..c3060821 --- /dev/null +++ b/docs/models/workday.md @@ -0,0 +1,8 @@ +# Workday + + +## Values + +| Name | Value | +| --------- | --------- | +| `WORKDAY` | workday | \ No newline at end of file diff --git a/docs/models/workspacecreaterequest.md b/docs/models/workspacecreaterequest.md index 3cf73fc3..9e87f139 100644 --- a/docs/models/workspacecreaterequest.md +++ b/docs/models/workspacecreaterequest.md @@ -3,7 +3,9 @@ ## Fields -| Field | Type | Required | Description | -| --------------------------------------- | --------------------------------------- | --------------------------------------- | --------------------------------------- | -| `name` | *str* | :heavy_check_mark: | Name of the workspace | -| `organization_id` | *Optional[str]* | :heavy_minus_sign: | ID of organization to add workspace to. | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | +| `name` | *str* | :heavy_check_mark: | Name of the workspace | +| `notifications` | [Optional[models.NotificationsConfig]](../models/notificationsconfig.md) | :heavy_minus_sign: | Configures workspace notifications. | +| `organization_id` | *Optional[str]* | :heavy_minus_sign: | ID of organization to add workspace to. | +| `region_id` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/workspaceoauthcredentialsrequest.md b/docs/models/workspaceoauthcredentialsrequest.md index 3f96cb8d..16f760e2 100644 --- a/docs/models/workspaceoauthcredentialsrequest.md +++ b/docs/models/workspaceoauthcredentialsrequest.md @@ -5,8 +5,8 @@ POST body for creating/updating workspace level OAuth credentials ## Fields -| Field | Type | Required | Description | Example | -| ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | -| `actor_type` | [models.ActorTypeEnum](../models/actortypeenum.md) | :heavy_check_mark: | Whether you're setting this override for a source or destination | | -| `configuration` | [models.OAuthCredentialsConfiguration](../models/oauthcredentialsconfiguration.md) | :heavy_check_mark: | The values required to configure the source. | {
"user": "charles"
} | -| `name` | [models.OAuthActorNames](../models/oauthactornames.md) | :heavy_check_mark: | N/A | | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | +| `actor_type` | [models.ActorTypeEnum](../models/actortypeenum.md) | :heavy_check_mark: | Whether you're setting this override for a source or destination | +| `configuration` | *Any* | :heavy_check_mark: | The values required to configure the source. | +| `name` | [models.OAuthActorNames](../models/oauthactornames.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/workspaceresponse.md b/docs/models/workspaceresponse.md index 6cb7b18a..f97a429d 100644 --- a/docs/models/workspaceresponse.md +++ b/docs/models/workspaceresponse.md @@ -5,8 +5,9 @@ Provides details of a single workspace. ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | -| `name` | *str* | :heavy_check_mark: | N/A | -| `workspace_id` | *str* | :heavy_check_mark: | N/A | -| `data_residency` | [Optional[models.GeographyEnum]](../models/geographyenum.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------- | -------------------------------------------------------------- | -------------------------------------------------------------- | -------------------------------------------------------------- | +| `data_residency` | *str* | :heavy_check_mark: | N/A | +| `name` | *str* | :heavy_check_mark: | N/A | +| `notifications` | [models.NotificationsConfig](../models/notificationsconfig.md) | :heavy_check_mark: | Configures workspace notifications. | +| `workspace_id` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/workspaceupdaterequest.md b/docs/models/workspaceupdaterequest.md index e02e7560..3eed692c 100644 --- a/docs/models/workspaceupdaterequest.md +++ b/docs/models/workspaceupdaterequest.md @@ -3,6 +3,8 @@ ## Fields -| Field | Type | Required | Description | -| --------------------- | --------------------- | --------------------- | --------------------- | -| `name` | *str* | :heavy_check_mark: | Name of the workspace | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | +| `name` | *Optional[str]* | :heavy_minus_sign: | Name of the workspace | +| `notifications` | [Optional[models.NotificationsConfig]](../models/notificationsconfig.md) | :heavy_minus_sign: | Configures workspace notifications. 
| +| `region_id` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/younium.md b/docs/models/younium.md new file mode 100644 index 00000000..7080434a --- /dev/null +++ b/docs/models/younium.md @@ -0,0 +1,8 @@ +# Younium + + +## Values + +| Name | Value | +| --------- | --------- | +| `YOUNIUM` | younium | \ No newline at end of file diff --git a/docs/models/yousign.md b/docs/models/yousign.md new file mode 100644 index 00000000..5ac74125 --- /dev/null +++ b/docs/models/yousign.md @@ -0,0 +1,8 @@ +# Yousign + + +## Values + +| Name | Value | +| --------- | --------- | +| `YOUSIGN` | yousign | \ No newline at end of file diff --git a/docs/models/zapsign.md b/docs/models/zapsign.md new file mode 100644 index 00000000..4ec3bf5e --- /dev/null +++ b/docs/models/zapsign.md @@ -0,0 +1,8 @@ +# Zapsign + + +## Values + +| Name | Value | +| --------- | --------- | +| `ZAPSIGN` | zapsign | \ No newline at end of file diff --git a/docs/models/zendeskchat.md b/docs/models/zendeskchat.md index 5e633975..820912b7 100644 --- a/docs/models/zendeskchat.md +++ b/docs/models/zendeskchat.md @@ -1,8 +1,8 @@ # ZendeskChat -## Fields +## Values -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | -| `credentials` | [Optional[models.ZendeskChatCredentials]](../models/zendeskchatcredentials.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Name | Value | +| -------------- | -------------- | +| `ZENDESK_CHAT` | zendesk-chat | \ No newline at end of file diff --git a/docs/models/zendeskchatcredentials.md b/docs/models/zendeskchatcredentials.md deleted file mode 100644 index e64656a0..00000000 --- a/docs/models/zendeskchatcredentials.md +++ /dev/null @@ -1,9 +0,0 @@ -# ZendeskChatCredentials - - -## Fields - -| Field | Type | Required | Description | -| -------------------------------------------- | -------------------------------------------- | -------------------------------------------- | -------------------------------------------- | -| `client_id` | *Optional[str]* | :heavy_minus_sign: | The Client ID of your OAuth application | -| `client_secret` | *Optional[str]* | :heavy_minus_sign: | The Client Secret of your OAuth application. 
| \ No newline at end of file diff --git a/docs/sdks/connections/README.md b/docs/sdks/connections/README.md index 828c9f39..0165f695 100644 --- a/docs/sdks/connections/README.md +++ b/docs/sdks/connections/README.md @@ -35,7 +35,6 @@ res = s.connections.create_connection(request=models.ConnectionCreateRequest( destination_id='e478de0d-a3a0-475c-b019-25f7dd29e281', source_id='95e66a59-8045-4307-9678-63bc3c9b8c93', name='Postgres-to-Bigquery', - namespace_format='${SOURCE_NAMESPACE}', )) if res.connection_response is not None: @@ -219,7 +218,6 @@ s = airbyte_api.AirbyteAPI( res = s.connections.patch_connection(request=api.PatchConnectionRequest( connection_patch_request=models.ConnectionPatchRequest( name='Postgres-to-Bigquery', - namespace_format='${SOURCE_NAMESPACE}', ), connection_id='', )) diff --git a/docs/sdks/declarativesourcedefinitions/README.md b/docs/sdks/declarativesourcedefinitions/README.md new file mode 100644 index 00000000..818884cd --- /dev/null +++ b/docs/sdks/declarativesourcedefinitions/README.md @@ -0,0 +1,252 @@ +# DeclarativeSourceDefinitions +(*declarative_source_definitions*) + +## Overview + +### Available Operations + +* [create_declarative_source_definition](#create_declarative_source_definition) - Create a declarative source definition. +* [delete_declarative_source_definition](#delete_declarative_source_definition) - Delete a declarative source definition. +* [get_declarative_source_definition](#get_declarative_source_definition) - Get declarative source definition details. +* [list_declarative_source_definitions](#list_declarative_source_definitions) - List declarative source definitions. +* [update_declarative_source_definition](#update_declarative_source_definition) - Update declarative source definition details. + +## create_declarative_source_definition + +Create a declarative source definition. + +### Example Usage + +```python +import airbyte_api +from airbyte_api import api, models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.declarative_source_definitions.create_declarative_source_definition(request=api.CreateDeclarativeSourceDefinitionRequest( + create_declarative_source_definition_request=models.CreateDeclarativeSourceDefinitionRequest( + manifest='', + name='', + ), + workspace_id='2d054f48-a68c-4d16-b04d-bb444d47c285', +)) + +if res.declarative_source_definition_response is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ----------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------- | +| `request` | [api.CreateDeclarativeSourceDefinitionRequest](../../api/createdeclarativesourcedefinitionrequest.md) | :heavy_check_mark: | The request object to use for the request. | + +### Response + +**[api.CreateDeclarativeSourceDefinitionResponse](../../api/createdeclarativesourcedefinitionresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | + +## delete_declarative_source_definition + +Delete a declarative source definition. 
+ +### Example Usage + +```python +import airbyte_api +from airbyte_api import api, models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.declarative_source_definitions.delete_declarative_source_definition(request=api.DeleteDeclarativeSourceDefinitionRequest( + definition_id='26cd06ea-5caa-47b9-98a2-1d217049557d', + workspace_id='f7cdc65f-5255-43d5-a6be-8fee673091f3', +)) + +if res.declarative_source_definition_response is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ----------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------- | +| `request` | [api.DeleteDeclarativeSourceDefinitionRequest](../../api/deletedeclarativesourcedefinitionrequest.md) | :heavy_check_mark: | The request object to use for the request. | + +### Response + +**[api.DeleteDeclarativeSourceDefinitionResponse](../../api/deletedeclarativesourcedefinitionresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | + +## get_declarative_source_definition + +Get declarative source definition details. + +### Example Usage + +```python +import airbyte_api +from airbyte_api import api, models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.declarative_source_definitions.get_declarative_source_definition(request=api.GetDeclarativeSourceDefinitionRequest( + definition_id='a003b7d3-efd4-4d7e-8ea6-469e9fe7871f', + workspace_id='3855d0f6-8cfb-44c2-ac49-0c3965c034bd', +)) + +if res.declarative_source_definition_response is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ----------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------- | +| `request` | [api.GetDeclarativeSourceDefinitionRequest](../../api/getdeclarativesourcedefinitionrequest.md) | :heavy_check_mark: | The request object to use for the request. | + +### Response + +**[api.GetDeclarativeSourceDefinitionResponse](../../api/getdeclarativesourcedefinitionresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | + +## list_declarative_source_definitions + +List declarative source definitions. 
+ +### Example Usage + +```python +import airbyte_api +from airbyte_api import api, models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.declarative_source_definitions.list_declarative_source_definitions(request=api.ListDeclarativeSourceDefinitionsRequest( + workspace_id='23bc0a4f-72b3-4d91-abe3-3f32d8a49dfc', +)) + +if res.declarative_source_definitions_response is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| --------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------- | +| `request` | [api.ListDeclarativeSourceDefinitionsRequest](../../api/listdeclarativesourcedefinitionsrequest.md) | :heavy_check_mark: | The request object to use for the request. | + +### Response + +**[api.ListDeclarativeSourceDefinitionsResponse](../../api/listdeclarativesourcedefinitionsresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | + +## update_declarative_source_definition + +Update declarative source definition details. + +### Example Usage + +```python +import airbyte_api +from airbyte_api import api, models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.declarative_source_definitions.update_declarative_source_definition(request=api.UpdateDeclarativeSourceDefinitionRequest( + update_declarative_source_definition_request=models.UpdateDeclarativeSourceDefinitionRequest( + manifest='', + ), + definition_id='66066427-c293-4cbf-b72e-b31a72a46545', + workspace_id='87f1ccdb-71b2-401c-8f60-cac1f2a2da80', +)) + +if res.declarative_source_definition_response is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ----------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------- | +| `request` | [api.UpdateDeclarativeSourceDefinitionRequest](../../api/updatedeclarativesourcedefinitionrequest.md) | :heavy_check_mark: | The request object to use for the request. 
| + +### Response + +**[api.UpdateDeclarativeSourceDefinitionResponse](../../api/updatedeclarativesourcedefinitionresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/docs/sdks/destinationdefinitions/README.md b/docs/sdks/destinationdefinitions/README.md new file mode 100644 index 00000000..4347be65 --- /dev/null +++ b/docs/sdks/destinationdefinitions/README.md @@ -0,0 +1,254 @@ +# DestinationDefinitions +(*destination_definitions*) + +## Overview + +### Available Operations + +* [create_destination_definition](#create_destination_definition) - Create a destination definition. +* [delete_destination_definition](#delete_destination_definition) - Delete a destination definition. +* [get_destination_definition](#get_destination_definition) - Get destination definition details. +* [list_destination_definitions](#list_destination_definitions) - List destination definitions. +* [update_destination_definition](#update_destination_definition) - Update destination definition details. + +## create_destination_definition + +Create a destination definition. + +### Example Usage + +```python +import airbyte_api +from airbyte_api import api, models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.destination_definitions.create_destination_definition(request=api.CreateDestinationDefinitionRequest( + create_definition_request=models.CreateDefinitionRequest( + docker_image_tag='', + docker_repository='', + name='', + ), + workspace_id='f49928fc-e1f7-4278-9366-b5b974ad2068', +)) + +if res.definition_response is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ----------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------- | +| `request` | [api.CreateDestinationDefinitionRequest](../../api/createdestinationdefinitionrequest.md) | :heavy_check_mark: | The request object to use for the request. | + +### Response + +**[api.CreateDestinationDefinitionResponse](../../api/createdestinationdefinitionresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | + +## delete_destination_definition + +Delete a destination definition. 
+ +### Example Usage + +```python +import airbyte_api +from airbyte_api import api, models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.destination_definitions.delete_destination_definition(request=api.DeleteDestinationDefinitionRequest( + definition_id='7a6d93e0-5a99-4e33-87ce-c0e739faf1e9', + workspace_id='619cc567-a21d-4f39-90ab-7854d54c9c42', +)) + +if res.definition_response is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ----------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------- | +| `request` | [api.DeleteDestinationDefinitionRequest](../../api/deletedestinationdefinitionrequest.md) | :heavy_check_mark: | The request object to use for the request. | + +### Response + +**[api.DeleteDestinationDefinitionResponse](../../api/deletedestinationdefinitionresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | + +## get_destination_definition + +Get destination definition details. + +### Example Usage + +```python +import airbyte_api +from airbyte_api import api, models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.destination_definitions.get_destination_definition(request=api.GetDestinationDefinitionRequest( + definition_id='5ddd49a6-7aa1-469d-bd19-fa66e3586402', + workspace_id='5a9c29a5-f169-496b-b3b1-ab05028ede0b', +)) + +if res.definition_response is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ----------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------- | +| `request` | [api.GetDestinationDefinitionRequest](../../api/getdestinationdefinitionrequest.md) | :heavy_check_mark: | The request object to use for the request. | + +### Response + +**[api.GetDestinationDefinitionResponse](../../api/getdestinationdefinitionresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | + +## list_destination_definitions + +List destination definitions. 
+ +### Example Usage + +```python +import airbyte_api +from airbyte_api import api, models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.destination_definitions.list_destination_definitions(request=api.ListDestinationDefinitionsRequest( + workspace_id='f1f18267-b72b-4ea5-a29c-8742c80ceaf4', +)) + +if res.definitions_response is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| --------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------- | +| `request` | [api.ListDestinationDefinitionsRequest](../../api/listdestinationdefinitionsrequest.md) | :heavy_check_mark: | The request object to use for the request. | + +### Response + +**[api.ListDestinationDefinitionsResponse](../../api/listdestinationdefinitionsresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | + +## update_destination_definition + +Update destination definition details. + +### Example Usage + +```python +import airbyte_api +from airbyte_api import api, models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.destination_definitions.update_destination_definition(request=api.UpdateDestinationDefinitionRequest( + update_definition_request=models.UpdateDefinitionRequest( + docker_image_tag='', + name='', + ), + definition_id='97416649-dabf-43f9-8715-c5c8279f7f23', + workspace_id='98e0ed50-276f-49ae-ad18-43bc892bb109', +)) + +if res.definition_response is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ----------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------- | +| `request` | [api.UpdateDestinationDefinitionRequest](../../api/updatedestinationdefinitionrequest.md) | :heavy_check_mark: | The request object to use for the request. 
| + +### Response + +**[api.UpdateDestinationDefinitionResponse](../../api/updatedestinationdefinitionresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/docs/sdks/destinations/README.md b/docs/sdks/destinations/README.md index 409b765a..927448fd 100644 --- a/docs/sdks/destinations/README.md +++ b/docs/sdks/destinations/README.md @@ -33,27 +33,12 @@ s = airbyte_api.AirbyteAPI( res = s.destinations.create_destination(request=models.DestinationCreateRequest( - configuration=models.DestinationPgvector( - embedding=models.DestinationPgvectorFake(), - indexing=models.PostgresConnection( - credentials=models.DestinationPgvectorCredentials( - password='AIRBYTE_PASSWORD', - ), - database='AIRBYTE_DATABASE', - host='AIRBYTE_ACCOUNT', - username='AIRBYTE_USER', - default_schema='AIRBYTE_SCHEMA', - port=5432, - ), - processing=models.DestinationPgvectorProcessingConfigModel( - chunk_size=540943, - metadata_fields=[ - 'age', - ], - text_fields=[ - 'users.*.name', - ], - ), + configuration=models.DestinationOracle( + host='instructive-mainstream.com', + sid='', + username='Robert.Legros98', + port=1521, + schema='airbyte', ), name='Postgres', workspace_id='2155ae5a-de39-4808-af6a-16fe7b8b4ed2', @@ -240,8 +225,11 @@ s = airbyte_api.AirbyteAPI( res = s.destinations.patch_destination(request=api.PatchDestinationRequest( destination_id='', destination_patch_request=models.DestinationPatchRequest( - configuration=models.DestinationDuckdb( - destination_path='motherduck:', + configuration=models.DestinationDevNull( + test_destination=models.Failing( + num_messages=992227, + test_destination_type=models.DestinationDevNullSchemasTestDestinationTestDestinationType.FAILING, + ), ), name='My Destination', ), @@ -292,10 +280,9 @@ s = airbyte_api.AirbyteAPI( res = s.destinations.put_destination(request=api.PutDestinationRequest( destination_id='', destination_put_request=models.DestinationPutRequest( - configuration=models.DestinationClickhouse( - database='', - host='urban-receptor.org', - username='Kaylie_Terry', + configuration=models.DestinationConvex( + access_key='', + deployment_url='https://cluttered-owl-337.convex.cloud', ), name='My Destination', ), diff --git a/docs/sdks/organizations/README.md b/docs/sdks/organizations/README.md index 3a448338..dfd50dff 100644 --- a/docs/sdks/organizations/README.md +++ b/docs/sdks/organizations/README.md @@ -5,8 +5,61 @@ ### Available Operations +* [create_or_update_organization_o_auth_credentials](#create_or_update_organization_o_auth_credentials) - Create OAuth override credentials for an organization and source type. * [list_organizations_for_user](#list_organizations_for_user) - List all organizations for a user +## create_or_update_organization_o_auth_credentials + +Create/update a set of OAuth credentials to override the Airbyte-provided OAuth credentials used for source/destination OAuth. +In order to determine what the credential configuration needs to be, please see the connector specification of the relevant source/destination. 
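+
+The shape of `configuration` is connector-specific. As a hypothetical sketch, a source whose specification expects a client ID/secret pair might take a configuration shaped like the following (the field names are illustrative, not taken from any particular connector specification):
+
+```python
+# Hypothetical connector-specific OAuth override credentials; the real
+# structure must match the connector's specification.
+configuration = {
+    'credentials': {
+        'client_id': 'YOUR_CLIENT_ID',
+        'client_secret': 'YOUR_CLIENT_SECRET',
+    },
+}
+```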
+
+
+### Example Usage
+
+```python
+import airbyte_api
+from airbyte_api import api, models
+
+s = airbyte_api.AirbyteAPI(
+    security=models.Security(
+        basic_auth=models.SchemeBasicAuth(
+            password='',
+            username='',
+        ),
+    ),
+)
+
+
+res = s.organizations.create_or_update_organization_o_auth_credentials(request=api.CreateOrUpdateOrganizationOAuthCredentialsRequest(
+    organization_o_auth_credentials_request=models.OrganizationOAuthCredentialsRequest(
+        actor_type=models.ActorTypeEnum.SOURCE,
+        configuration=models.Airtable(),
+        name='',
+    ),
+    organization_id='',
+))
+
+if res is not None:
+    # handle response
+    pass
+
+```
+
+### Parameters
+
+| Parameter | Type | Required | Description |
+| ------------------------------------------------ | ------------------------------------------------ | ------------------------------------------------ | ------------------------------------------------ |
+| `request` | [api.CreateOrUpdateOrganizationOAuthCredentialsRequest](../../api/createorupdateorganizationoauthcredentialsrequest.md) | :heavy_check_mark: | The request object to use for the request. |
+
+### Response
+
+**[api.CreateOrUpdateOrganizationOAuthCredentialsResponse](../../api/createorupdateorganizationoauthcredentialsresponse.md)**
+
+### Errors
+
+| Error Type | Status Code | Content Type |
+| --------------- | --------------- | --------------- |
+| errors.SDKError | 4XX, 5XX | \*/\* |
+
 ## list_organizations_for_user
 
 Lists a user's organizations.
diff --git a/docs/sdks/permissions/README.md b/docs/sdks/permissions/README.md
index 8a04ef83..92e1bdd8 100644
--- a/docs/sdks/permissions/README.md
+++ b/docs/sdks/permissions/README.md
@@ -217,7 +217,7 @@ s = airbyte_api.AirbyteAPI(
 
 res = s.permissions.update_permission(request=api.UpdatePermissionRequest(
     permission_update_request=models.PermissionUpdateRequest(
-        permission_type=models.PermissionType.ORGANIZATION_MEMBER,
+        permission_type=models.PermissionType.WORKSPACE_OWNER,
     ),
     permission_id='',
 ))
diff --git a/docs/sdks/sourcedefinitions/README.md b/docs/sdks/sourcedefinitions/README.md
new file mode 100644
index 00000000..57d6748f
--- /dev/null
+++ b/docs/sdks/sourcedefinitions/README.md
@@ -0,0 +1,254 @@
+# SourceDefinitions
+(*source_definitions*)
+
+## Overview
+
+### Available Operations
+
+* [create_source_definition](#create_source_definition) - Create a source definition.
+* [delete_source_definition](#delete_source_definition) - Delete a source definition.
+* [get_source_definition](#get_source_definition) - Get source definition details.
+* [list_source_definitions](#list_source_definitions) - List source definitions.
+* [update_source_definition](#update_source_definition) - Update source definition details.
+
+## create_source_definition
+
+Create a source definition.
+ +### Example Usage + +```python +import airbyte_api +from airbyte_api import api, models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.source_definitions.create_source_definition(request=api.CreateSourceDefinitionRequest( + create_definition_request=models.CreateDefinitionRequest( + docker_image_tag='', + docker_repository='', + name='', + ), + workspace_id='06dbde72-63a8-4326-8f4b-67eb708f9ad6', +)) + +if res.definition_response is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | +| `request` | [api.CreateSourceDefinitionRequest](../../api/createsourcedefinitionrequest.md) | :heavy_check_mark: | The request object to use for the request. | + +### Response + +**[api.CreateSourceDefinitionResponse](../../api/createsourcedefinitionresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | + +## delete_source_definition + +Delete a source definition. + +### Example Usage + +```python +import airbyte_api +from airbyte_api import api, models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.source_definitions.delete_source_definition(request=api.DeleteSourceDefinitionRequest( + definition_id='fddaf9d9-7e09-433e-8e25-895734ad8809', + workspace_id='9789f575-f200-4155-b7ec-0750094af77f', +)) + +if res.definition_response is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | +| `request` | [api.DeleteSourceDefinitionRequest](../../api/deletesourcedefinitionrequest.md) | :heavy_check_mark: | The request object to use for the request. | + +### Response + +**[api.DeleteSourceDefinitionResponse](../../api/deletesourcedefinitionresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | + +## get_source_definition + +Get source definition details. 
+ +### Example Usage + +```python +import airbyte_api +from airbyte_api import api, models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.source_definitions.get_source_definition(request=api.GetSourceDefinitionRequest( + definition_id='b6405f71-0930-4f13-a99b-6b1b0a882853', + workspace_id='e76093e5-5cd8-4b87-ab32-c620a178a1c3', +)) + +if res.definition_response is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------------- | ------------------------------------------------------------------------- | ------------------------------------------------------------------------- | ------------------------------------------------------------------------- | +| `request` | [api.GetSourceDefinitionRequest](../../api/getsourcedefinitionrequest.md) | :heavy_check_mark: | The request object to use for the request. | + +### Response + +**[api.GetSourceDefinitionResponse](../../api/getsourcedefinitionresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | + +## list_source_definitions + +List source definitions. + +### Example Usage + +```python +import airbyte_api +from airbyte_api import api, models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.source_definitions.list_source_definitions(request=api.ListSourceDefinitionsRequest( + workspace_id='fb60a310-f38b-47cb-9633-01f0cf740c18', +)) + +if res.definitions_response is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ----------------------------------------------------------------------------- | ----------------------------------------------------------------------------- | ----------------------------------------------------------------------------- | ----------------------------------------------------------------------------- | +| `request` | [api.ListSourceDefinitionsRequest](../../api/listsourcedefinitionsrequest.md) | :heavy_check_mark: | The request object to use for the request. | + +### Response + +**[api.ListSourceDefinitionsResponse](../../api/listsourcedefinitionsresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | + +## update_source_definition + +Update source definition details. 
+ +### Example Usage + +```python +import airbyte_api +from airbyte_api import api, models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.source_definitions.update_source_definition(request=api.UpdateSourceDefinitionRequest( + update_definition_request=models.UpdateDefinitionRequest( + docker_image_tag='', + name='', + ), + definition_id='6eaf6fbb-3e08-4f73-9ff1-de62553abd76', + workspace_id='b6bd5c36-3814-4489-97fb-3e48c1e0fdea', +)) + +if res.definition_response is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | +| `request` | [api.UpdateSourceDefinitionRequest](../../api/updatesourcedefinitionrequest.md) | :heavy_check_mark: | The request object to use for the request. | + +### Response + +**[api.UpdateSourceDefinitionResponse](../../api/updatesourcedefinitionresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/docs/sdks/sources/README.md b/docs/sdks/sources/README.md index c82f5afb..9f737009 100644 --- a/docs/sdks/sources/README.md +++ b/docs/sdks/sources/README.md @@ -21,7 +21,6 @@ Creates a source given a name, workspace id, and a json blob containing the conf ```python import airbyte_api -import dateutil.parser from airbyte_api import models s = airbyte_api.AirbyteAPI( @@ -35,9 +34,9 @@ s = airbyte_api.AirbyteAPI( res = s.sources.create_source(request=models.SourceCreateRequest( - configuration=models.SourcePosthog( + configuration=models.SourcePlausible( api_key='', - start_date=dateutil.parser.isoparse('2021-01-01T00:00:00Z'), + site_id='docs.airbyte.com', ), name='My Source', workspace_id='744cc0ed-7f05-4949-9e60-2a814f90c035', @@ -183,7 +182,7 @@ s = airbyte_api.AirbyteAPI( res = s.sources.initiate_o_auth(request=models.InitiateOauthRequest( redirect_url='https://cloud.airbyte.io/v1/api/oauth/callback', - source_type=models.OAuthActorNames.GITLAB, + source_type=models.OAuthActorNames.FACEBOOK_PAGES, workspace_id='871d9b60-11d1-44cb-8c92-c246d53bf87e', )) @@ -280,8 +279,8 @@ s = airbyte_api.AirbyteAPI( res = s.sources.patch_source(request=api.PatchSourceRequest( source_id='', source_patch_request=models.SourcePatchRequest( - configuration=models.SourceEventzilla( - x_api_key='', + configuration=models.SourceEncharge( + api_key='', ), name='My Source', workspace_id='744cc0ed-7f05-4949-9e60-2a814f90c035', diff --git a/docs/sdks/tags/README.md b/docs/sdks/tags/README.md new file mode 100644 index 00000000..32b6e35b --- /dev/null +++ b/docs/sdks/tags/README.md @@ -0,0 +1,246 @@ +# Tags +(*tags*) + +## Overview + +### Available Operations + +* [create_tag](#create_tag) - Create a tag +* [delete_tag](#delete_tag) - Delete a tag +* [get_tag](#get_tag) - Get a tag +* [list_tags](#list_tags) - List all tags +* [update_tag](#update_tag) - Update a tag + +## create_tag + +Create a tag + +### Example Usage + +```python +import airbyte_api +from airbyte_api import models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + 
basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.tags.create_tag(request=models.TagCreateRequest( + color='blue', + name='', + workspace_id='5f85d5ab-c889-4273-91d7-c22bac981db2', +)) + +if res.tag_response is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ----------------------------------------------------------- | ----------------------------------------------------------- | ----------------------------------------------------------- | ----------------------------------------------------------- | +| `request` | [models.TagCreateRequest](../../models/tagcreaterequest.md) | :heavy_check_mark: | The request object to use for the request. | + +### Response + +**[api.CreateTagResponse](../../api/createtagresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | + +## delete_tag + +Delete a tag + +### Example Usage + +```python +import airbyte_api +from airbyte_api import api, models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.tags.delete_tag(request=api.DeleteTagRequest( + tag_id='da1c4fd4-2786-4b27-8b72-2335c85a5af8', +)) + +if res is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ----------------------------------------------------- | ----------------------------------------------------- | ----------------------------------------------------- | ----------------------------------------------------- | +| `request` | [api.DeleteTagRequest](../../api/deletetagrequest.md) | :heavy_check_mark: | The request object to use for the request. | + +### Response + +**[api.DeleteTagResponse](../../api/deletetagresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | + +## get_tag + +Get a tag + +### Example Usage + +```python +import airbyte_api +from airbyte_api import api, models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.tags.get_tag(request=api.GetTagRequest( + tag_id='808ab48f-5790-47fe-aa1e-3073281a0300', +)) + +if res.tag_response is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ----------------------------------------------- | ----------------------------------------------- | ----------------------------------------------- | ----------------------------------------------- | +| `request` | [api.GetTagRequest](../../api/gettagrequest.md) | :heavy_check_mark: | The request object to use for the request. 
| + +### Response + +**[api.GetTagResponse](../../api/gettagresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | + +## list_tags + +Lists all tags + +### Example Usage + +```python +import airbyte_api +from airbyte_api import api, models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.tags.list_tags(request=api.ListTagsRequest()) + +if res.tags_response is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| --------------------------------------------------- | --------------------------------------------------- | --------------------------------------------------- | --------------------------------------------------- | +| `request` | [api.ListTagsRequest](../../api/listtagsrequest.md) | :heavy_check_mark: | The request object to use for the request. | + +### Response + +**[api.ListTagsResponse](../../api/listtagsresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | + +## update_tag + +Update a tag + +### Example Usage + +```python +import airbyte_api +from airbyte_api import api, models + +s = airbyte_api.AirbyteAPI( + security=models.Security( + basic_auth=models.SchemeBasicAuth( + password='', + username='', + ), + ), +) + + +res = s.tags.update_tag(request=api.UpdateTagRequest( + tag_patch_request=models.TagPatchRequest( + color='turquoise', + name='', + ), + tag_id='3043493e-7596-4d2b-8ee9-859838c615f6', +)) + +if res.tag_response is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ----------------------------------------------------- | ----------------------------------------------------- | ----------------------------------------------------- | ----------------------------------------------------- | +| `request` | [api.UpdateTagRequest](../../api/updatetagrequest.md) | :heavy_check_mark: | The request object to use for the request. 
| + +### Response + +**[api.UpdateTagResponse](../../api/updatetagresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| errors.SDKError | 4XX, 5XX | \*/\* | \ No newline at end of file diff --git a/docs/sdks/workspaces/README.md b/docs/sdks/workspaces/README.md index 59edced4..eb08a6b2 100644 --- a/docs/sdks/workspaces/README.md +++ b/docs/sdks/workspaces/README.md @@ -36,10 +36,8 @@ s = airbyte_api.AirbyteAPI( res = s.workspaces.create_or_update_workspace_o_auth_credentials(request=api.CreateOrUpdateWorkspaceOAuthCredentialsRequest( workspace_o_auth_credentials_request=models.WorkspaceOAuthCredentialsRequest( actor_type=models.ActorTypeEnum.DESTINATION, - configuration={ - 'user': 'charles', - }, - name=models.OAuthActorNames.AMAZON_ADS, + configuration=models.Airtable(), + name=models.OAuthActorNames.MICROSOFT_TEAMS, ), workspace_id='', )) diff --git a/gen.yaml b/gen.yaml index f1f7b1cf..5b5b7466 100644 --- a/gen.yaml +++ b/gen.yaml @@ -5,14 +5,15 @@ generation: optionalPropertyRendering: withExample useClassNamesForArrayFields: true fixes: - nameResolutionDec2023: false + nameResolutionFeb2025: false parameterOrderingFeb2024: false requestResponseComponentNamesFeb2024: false + securityFeb2025: false auth: oAuth2ClientCredentialsEnabled: true oAuth2PasswordEnabled: false python: - version: 0.52.2 + version: 0.53.0 additionalDependencies: dependencies: {} extraDependencies: diff --git a/pylintrc b/pylintrc index af1efee5..0016e742 100644 --- a/pylintrc +++ b/pylintrc @@ -181,8 +181,9 @@ good-names=i, Run, _, e, + id, db, - id + to # Good variable names regexes, separated by a comma. If names match any regex, # they will always be accepted diff --git a/setup.py b/setup.py index 8bfea1bb..ea3e63e4 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ setuptools.setup( name='airbyte-api', - version='0.52.2', + version='0.53.0', author='Airbyte', description='Python Client SDK for Airbyte API', url='https://github.com/airbytehq/airbyte-api-python-sdk.git', diff --git a/src/airbyte_api/api/__init__.py b/src/airbyte_api/api/__init__.py index ea814f7f..433ccd6a 100644 --- a/src/airbyte_api/api/__init__.py +++ b/src/airbyte_api/api/__init__.py @@ -2,32 +2,49 @@ from .canceljob import * from .createconnection import * +from .createdeclarativesourcedefinition import * from .createdestination import * +from .createdestinationdefinition import * from .createjob import * +from .createorupdateorganizationoauthcredentials import * from .createorupdateworkspaceoauthcredentials import * from .createpermission import * from .createsource import * +from .createsourcedefinition import * +from .createtag import * from .createworkspace import * from .deleteconnection import * +from .deletedeclarativesourcedefinition import * from .deletedestination import * +from .deletedestinationdefinition import * from .deletepermission import * from .deletesource import * +from .deletesourcedefinition import * +from .deletetag import * from .deleteworkspace import * from .getconnection import * +from .getdeclarativesourcedefinition import * from .getdestination import * +from .getdestinationdefinition import * from .gethealthcheck import * from .getjob import * from .getpermission import * from .getsource import * +from .getsourcedefinition import * from .getstreamproperties import * +from .gettag import * from .getworkspace import * from .initiateoauth import * from .listconnections import * +from .listdeclarativesourcedefinitions import * 
+from .listdestinationdefinitions import * from .listdestinations import * from .listjobs import * from .listorganizationsforuser import * from .listpermissions import * +from .listsourcedefinitions import * from .listsources import * +from .listtags import * from .listuserswithinanorganization import * from .listworkspaces import * from .patchconnection import * @@ -35,7 +52,11 @@ from .patchsource import * from .putdestination import * from .putsource import * +from .updatedeclarativesourcedefinition import * +from .updatedestinationdefinition import * from .updatepermission import * +from .updatesourcedefinition import * +from .updatetag import * from .updateworkspace import * -__all__ = ["CancelJobRequest","CancelJobResponse","CreateConnectionResponse","CreateDestinationResponse","CreateJobResponse","CreateOrUpdateWorkspaceOAuthCredentialsRequest","CreateOrUpdateWorkspaceOAuthCredentialsResponse","CreatePermissionResponse","CreateSourceResponse","CreateWorkspaceResponse","DeleteConnectionRequest","DeleteConnectionResponse","DeleteDestinationRequest","DeleteDestinationResponse","DeletePermissionRequest","DeletePermissionResponse","DeleteSourceRequest","DeleteSourceResponse","DeleteWorkspaceRequest","DeleteWorkspaceResponse","GetConnectionRequest","GetConnectionResponse","GetDestinationRequest","GetDestinationResponse","GetHealthCheckResponse","GetJobRequest","GetJobResponse","GetPermissionRequest","GetPermissionResponse","GetSourceRequest","GetSourceResponse","GetStreamPropertiesRequest","GetStreamPropertiesResponse","GetWorkspaceRequest","GetWorkspaceResponse","InitiateOAuthResponse","ListConnectionsRequest","ListConnectionsResponse","ListDestinationsRequest","ListDestinationsResponse","ListJobsRequest","ListJobsResponse","ListOrganizationsForUserResponse","ListPermissionsRequest","ListPermissionsResponse","ListSourcesRequest","ListSourcesResponse","ListUsersWithinAnOrganizationRequest","ListUsersWithinAnOrganizationResponse","ListWorkspacesRequest","ListWorkspacesResponse","PatchConnectionRequest","PatchConnectionResponse","PatchDestinationRequest","PatchDestinationResponse","PatchSourceRequest","PatchSourceResponse","PutDestinationRequest","PutDestinationResponse","PutSourceRequest","PutSourceResponse","UpdatePermissionRequest","UpdatePermissionResponse","UpdateWorkspaceRequest","UpdateWorkspaceResponse"] +__all__ = 
["CancelJobRequest","CancelJobResponse","CreateConnectionResponse","CreateDeclarativeSourceDefinitionRequest","CreateDeclarativeSourceDefinitionResponse","CreateDestinationDefinitionRequest","CreateDestinationDefinitionResponse","CreateDestinationResponse","CreateJobResponse","CreateOrUpdateOrganizationOAuthCredentialsRequest","CreateOrUpdateOrganizationOAuthCredentialsResponse","CreateOrUpdateWorkspaceOAuthCredentialsRequest","CreateOrUpdateWorkspaceOAuthCredentialsResponse","CreatePermissionResponse","CreateSourceDefinitionRequest","CreateSourceDefinitionResponse","CreateSourceResponse","CreateTagResponse","CreateWorkspaceResponse","DeleteConnectionRequest","DeleteConnectionResponse","DeleteDeclarativeSourceDefinitionRequest","DeleteDeclarativeSourceDefinitionResponse","DeleteDestinationDefinitionRequest","DeleteDestinationDefinitionResponse","DeleteDestinationRequest","DeleteDestinationResponse","DeletePermissionRequest","DeletePermissionResponse","DeleteSourceDefinitionRequest","DeleteSourceDefinitionResponse","DeleteSourceRequest","DeleteSourceResponse","DeleteTagRequest","DeleteTagResponse","DeleteWorkspaceRequest","DeleteWorkspaceResponse","GetConnectionRequest","GetConnectionResponse","GetDeclarativeSourceDefinitionRequest","GetDeclarativeSourceDefinitionResponse","GetDestinationDefinitionRequest","GetDestinationDefinitionResponse","GetDestinationRequest","GetDestinationResponse","GetHealthCheckResponse","GetJobRequest","GetJobResponse","GetPermissionRequest","GetPermissionResponse","GetSourceDefinitionRequest","GetSourceDefinitionResponse","GetSourceRequest","GetSourceResponse","GetStreamPropertiesRequest","GetStreamPropertiesResponse","GetTagRequest","GetTagResponse","GetWorkspaceRequest","GetWorkspaceResponse","InitiateOAuthResponse","ListConnectionsRequest","ListConnectionsResponse","ListDeclarativeSourceDefinitionsRequest","ListDeclarativeSourceDefinitionsResponse","ListDestinationDefinitionsRequest","ListDestinationDefinitionsResponse","ListDestinationsRequest","ListDestinationsResponse","ListJobsRequest","ListJobsResponse","ListOrganizationsForUserResponse","ListPermissionsRequest","ListPermissionsResponse","ListSourceDefinitionsRequest","ListSourceDefinitionsResponse","ListSourcesRequest","ListSourcesResponse","ListTagsRequest","ListTagsResponse","ListUsersWithinAnOrganizationRequest","ListUsersWithinAnOrganizationResponse","ListWorkspacesRequest","ListWorkspacesResponse","PatchConnectionRequest","PatchConnectionResponse","PatchDestinationRequest","PatchDestinationResponse","PatchSourceRequest","PatchSourceResponse","PutDestinationRequest","PutDestinationResponse","PutSourceRequest","PutSourceResponse","UpdateDeclarativeSourceDefinitionRequest","UpdateDeclarativeSourceDefinitionResponse","UpdateDestinationDefinitionRequest","UpdateDestinationDefinitionResponse","UpdatePermissionRequest","UpdatePermissionResponse","UpdateSourceDefinitionRequest","UpdateSourceDefinitionResponse","UpdateTagRequest","UpdateTagResponse","UpdateWorkspaceRequest","UpdateWorkspaceResponse"] diff --git a/src/airbyte_api/api/createdeclarativesourcedefinition.py b/src/airbyte_api/api/createdeclarativesourcedefinition.py new file mode 100644 index 00000000..f8a635c1 --- /dev/null +++ b/src/airbyte_api/api/createdeclarativesourcedefinition.py @@ -0,0 +1,30 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import createdeclarativesourcedefinitionrequest as models_createdeclarativesourcedefinitionrequest +from ..models import declarativesourcedefinitionresponse as models_declarativesourcedefinitionresponse +from typing import Optional + + +@dataclasses.dataclass +class CreateDeclarativeSourceDefinitionRequest: + create_declarative_source_definition_request: models_createdeclarativesourcedefinitionrequest.CreateDeclarativeSourceDefinitionRequest = dataclasses.field(metadata={'request': { 'media_type': 'application/json' }}) + workspace_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'workspaceId', 'style': 'simple', 'explode': False }}) + + + + +@dataclasses.dataclass +class CreateDeclarativeSourceDefinitionResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + declarative_source_definition_response: Optional[models_declarativesourcedefinitionresponse.DeclarativeSourceDefinitionResponse] = dataclasses.field(default=None) + r"""Success""" + + diff --git a/src/airbyte_api/api/createdestinationdefinition.py b/src/airbyte_api/api/createdestinationdefinition.py new file mode 100644 index 00000000..9f7b55d8 --- /dev/null +++ b/src/airbyte_api/api/createdestinationdefinition.py @@ -0,0 +1,30 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import createdefinitionrequest as models_createdefinitionrequest +from ..models import definitionresponse as models_definitionresponse +from typing import Optional + + +@dataclasses.dataclass +class CreateDestinationDefinitionRequest: + create_definition_request: models_createdefinitionrequest.CreateDefinitionRequest = dataclasses.field(metadata={'request': { 'media_type': 'application/json' }}) + workspace_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'workspaceId', 'style': 'simple', 'explode': False }}) + + + + +@dataclasses.dataclass +class CreateDestinationDefinitionResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + definition_response: Optional[models_definitionresponse.DefinitionResponse] = dataclasses.field(default=None) + r"""Success""" + + diff --git a/src/airbyte_api/api/createorupdateorganizationoauthcredentials.py b/src/airbyte_api/api/createorupdateorganizationoauthcredentials.py new file mode 100644 index 00000000..67b98399 --- /dev/null +++ b/src/airbyte_api/api/createorupdateorganizationoauthcredentials.py @@ -0,0 +1,26 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import organizationoauthcredentialsrequest as models_organizationoauthcredentialsrequest + + +@dataclasses.dataclass +class CreateOrUpdateOrganizationOAuthCredentialsRequest: + organization_o_auth_credentials_request: models_organizationoauthcredentialsrequest.OrganizationOAuthCredentialsRequest = dataclasses.field(metadata={'request': { 'media_type': 'application/json' }}) + organization_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'organizationId', 'style': 'simple', 'explode': False }}) + + + + +@dataclasses.dataclass +class CreateOrUpdateOrganizationOAuthCredentialsResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + + diff --git a/src/airbyte_api/api/createsourcedefinition.py b/src/airbyte_api/api/createsourcedefinition.py new file mode 100644 index 00000000..084992e2 --- /dev/null +++ b/src/airbyte_api/api/createsourcedefinition.py @@ -0,0 +1,30 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import createdefinitionrequest as models_createdefinitionrequest +from ..models import definitionresponse as models_definitionresponse +from typing import Optional + + +@dataclasses.dataclass +class CreateSourceDefinitionRequest: + create_definition_request: models_createdefinitionrequest.CreateDefinitionRequest = dataclasses.field(metadata={'request': { 'media_type': 'application/json' }}) + workspace_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'workspaceId', 'style': 'simple', 'explode': False }}) + + + + +@dataclasses.dataclass +class CreateSourceDefinitionResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + definition_response: Optional[models_definitionresponse.DefinitionResponse] = dataclasses.field(default=None) + r"""Success""" + + diff --git a/src/airbyte_api/api/createtag.py b/src/airbyte_api/api/createtag.py new file mode 100644 index 00000000..b50b99f8 --- /dev/null +++ b/src/airbyte_api/api/createtag.py @@ -0,0 +1,21 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import tagresponse as models_tagresponse +from typing import Optional + + +@dataclasses.dataclass +class CreateTagResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + tag_response: Optional[models_tagresponse.TagResponse] = dataclasses.field(default=None) + r"""Successful operation""" + + diff --git a/src/airbyte_api/api/deletedeclarativesourcedefinition.py b/src/airbyte_api/api/deletedeclarativesourcedefinition.py new file mode 100644 index 00000000..0ff6817f --- /dev/null +++ b/src/airbyte_api/api/deletedeclarativesourcedefinition.py @@ -0,0 +1,29 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import declarativesourcedefinitionresponse as models_declarativesourcedefinitionresponse +from typing import Optional + + +@dataclasses.dataclass +class DeleteDeclarativeSourceDefinitionRequest: + definition_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'definitionId', 'style': 'simple', 'explode': False }}) + workspace_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'workspaceId', 'style': 'simple', 'explode': False }}) + + + + +@dataclasses.dataclass +class DeleteDeclarativeSourceDefinitionResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + declarative_source_definition_response: Optional[models_declarativesourcedefinitionresponse.DeclarativeSourceDefinitionResponse] = dataclasses.field(default=None) + r"""Success""" + + diff --git a/src/airbyte_api/api/deletedestinationdefinition.py b/src/airbyte_api/api/deletedestinationdefinition.py new file mode 100644 index 00000000..b48585bd --- /dev/null +++ b/src/airbyte_api/api/deletedestinationdefinition.py @@ -0,0 +1,29 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import definitionresponse as models_definitionresponse +from typing import Optional + + +@dataclasses.dataclass +class DeleteDestinationDefinitionRequest: + definition_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'definitionId', 'style': 'simple', 'explode': False }}) + workspace_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'workspaceId', 'style': 'simple', 'explode': False }}) + + + + +@dataclasses.dataclass +class DeleteDestinationDefinitionResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + definition_response: Optional[models_definitionresponse.DefinitionResponse] = dataclasses.field(default=None) + r"""Success""" + + diff --git a/src/airbyte_api/api/deletesourcedefinition.py b/src/airbyte_api/api/deletesourcedefinition.py new file mode 100644 index 00000000..3809262a --- /dev/null +++ b/src/airbyte_api/api/deletesourcedefinition.py @@ -0,0 +1,29 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import definitionresponse as models_definitionresponse +from typing import Optional + + +@dataclasses.dataclass +class DeleteSourceDefinitionRequest: + definition_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'definitionId', 'style': 'simple', 'explode': False }}) + workspace_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'workspaceId', 'style': 'simple', 'explode': False }}) + + + + +@dataclasses.dataclass +class DeleteSourceDefinitionResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + definition_response: Optional[models_definitionresponse.DefinitionResponse] = dataclasses.field(default=None) + r"""Success""" + + diff --git a/src/airbyte_api/api/deletetag.py b/src/airbyte_api/api/deletetag.py new file mode 100644 index 00000000..0b3098a4 --- /dev/null +++ b/src/airbyte_api/api/deletetag.py @@ -0,0 +1,24 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http + + +@dataclasses.dataclass +class DeleteTagRequest: + tag_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'tagId', 'style': 'simple', 'explode': False }}) + + + + +@dataclasses.dataclass +class DeleteTagResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + + diff --git a/src/airbyte_api/api/getdeclarativesourcedefinition.py b/src/airbyte_api/api/getdeclarativesourcedefinition.py new file mode 100644 index 00000000..3f7c1235 --- /dev/null +++ b/src/airbyte_api/api/getdeclarativesourcedefinition.py @@ -0,0 +1,29 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import declarativesourcedefinitionresponse as models_declarativesourcedefinitionresponse +from typing import Optional + + +@dataclasses.dataclass +class GetDeclarativeSourceDefinitionRequest: + definition_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'definitionId', 'style': 'simple', 'explode': False }}) + workspace_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'workspaceId', 'style': 'simple', 'explode': False }}) + + + + +@dataclasses.dataclass +class GetDeclarativeSourceDefinitionResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + declarative_source_definition_response: Optional[models_declarativesourcedefinitionresponse.DeclarativeSourceDefinitionResponse] = dataclasses.field(default=None) + r"""Success""" + + diff --git a/src/airbyte_api/api/getdestination.py b/src/airbyte_api/api/getdestination.py index 1026dd0c..a8e245e3 100644 --- a/src/airbyte_api/api/getdestination.py +++ b/src/airbyte_api/api/getdestination.py @@ -10,6 +10,8 @@ @dataclasses.dataclass class GetDestinationRequest: destination_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'destinationId', 'style': 'simple', 'explode': False }}) + include_secret_coordinates: Optional[bool] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'includeSecretCoordinates', 'style': 'form', 'explode': True }}) + r"""Rather than returning *** for secret properties, include the secret coordinate information""" diff --git a/src/airbyte_api/api/getdestinationdefinition.py b/src/airbyte_api/api/getdestinationdefinition.py new file mode 100644 index 00000000..d84495f3 --- /dev/null +++ b/src/airbyte_api/api/getdestinationdefinition.py @@ -0,0 +1,29 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import definitionresponse as models_definitionresponse +from typing import Optional + + +@dataclasses.dataclass +class GetDestinationDefinitionRequest: + definition_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'definitionId', 'style': 'simple', 'explode': False }}) + workspace_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'workspaceId', 'style': 'simple', 'explode': False }}) + + + + +@dataclasses.dataclass +class GetDestinationDefinitionResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + definition_response: Optional[models_definitionresponse.DefinitionResponse] = dataclasses.field(default=None) + r"""Success""" + + diff --git a/src/airbyte_api/api/getsource.py b/src/airbyte_api/api/getsource.py index 8b742ef4..7c113c63 100644 --- a/src/airbyte_api/api/getsource.py +++ b/src/airbyte_api/api/getsource.py @@ -10,6 +10,8 @@ @dataclasses.dataclass class GetSourceRequest: source_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'sourceId', 'style': 'simple', 'explode': False }}) + include_secret_coordinates: Optional[bool] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'includeSecretCoordinates', 'style': 'form', 'explode': True }}) + r"""Rather than returning *** for secret properties, include the secret coordinate information""" diff --git a/src/airbyte_api/api/getsourcedefinition.py b/src/airbyte_api/api/getsourcedefinition.py new file mode 100644 index 00000000..abf2bb32 --- /dev/null +++ b/src/airbyte_api/api/getsourcedefinition.py @@ -0,0 +1,29 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import definitionresponse as models_definitionresponse +from typing import Optional + + +@dataclasses.dataclass +class GetSourceDefinitionRequest: + definition_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'definitionId', 'style': 'simple', 'explode': False }}) + workspace_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'workspaceId', 'style': 'simple', 'explode': False }}) + + + + +@dataclasses.dataclass +class GetSourceDefinitionResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + definition_response: Optional[models_definitionresponse.DefinitionResponse] = dataclasses.field(default=None) + r"""Success""" + + diff --git a/src/airbyte_api/api/gettag.py b/src/airbyte_api/api/gettag.py new file mode 100644 index 00000000..c0855075 --- /dev/null +++ b/src/airbyte_api/api/gettag.py @@ -0,0 +1,28 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import tagresponse as models_tagresponse +from typing import Optional + + +@dataclasses.dataclass +class GetTagRequest: + tag_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'tagId', 'style': 'simple', 'explode': False }}) + + + + +@dataclasses.dataclass +class GetTagResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + tag_response: Optional[models_tagresponse.TagResponse] = dataclasses.field(default=None) + r"""Successful operation""" + + diff --git a/src/airbyte_api/api/listconnections.py b/src/airbyte_api/api/listconnections.py index cacc8fb3..60d483e6 100644 --- a/src/airbyte_api/api/listconnections.py +++ b/src/airbyte_api/api/listconnections.py @@ -15,6 +15,8 @@ class ListConnectionsRequest: r"""Set the limit on the number of Connections returned. The default is 20.""" offset: Optional[int] = dataclasses.field(default=0, metadata={'query_param': { 'field_name': 'offset', 'style': 'form', 'explode': True }}) r"""Set the offset to start at when returning Connections. The default is 0""" + tag_ids: Optional[List[str]] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'tagIds', 'style': 'form', 'explode': True }}) + r"""The UUIDs of the tags you wish to list connections for. Empty list will retrieve all connections.""" workspace_ids: Optional[List[str]] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceIds', 'style': 'form', 'explode': True }}) r"""The UUIDs of the workspaces you wish to list connections for. Empty list will retrieve all allowed workspaces.""" diff --git a/src/airbyte_api/api/listdeclarativesourcedefinitions.py b/src/airbyte_api/api/listdeclarativesourcedefinitions.py new file mode 100644 index 00000000..f24409d5 --- /dev/null +++ b/src/airbyte_api/api/listdeclarativesourcedefinitions.py @@ -0,0 +1,28 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import declarativesourcedefinitionsresponse as models_declarativesourcedefinitionsresponse +from typing import Optional + + +@dataclasses.dataclass +class ListDeclarativeSourceDefinitionsRequest: + workspace_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'workspaceId', 'style': 'simple', 'explode': False }}) + + + + +@dataclasses.dataclass +class ListDeclarativeSourceDefinitionsResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + declarative_source_definitions_response: Optional[models_declarativesourcedefinitionsresponse.DeclarativeSourceDefinitionsResponse] = dataclasses.field(default=None) + r"""Successful operation""" + + diff --git a/src/airbyte_api/api/listdestinationdefinitions.py b/src/airbyte_api/api/listdestinationdefinitions.py new file mode 100644 index 00000000..dd33643d --- /dev/null +++ b/src/airbyte_api/api/listdestinationdefinitions.py @@ -0,0 +1,28 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import definitionsresponse as models_definitionsresponse +from typing import Optional + + +@dataclasses.dataclass +class ListDestinationDefinitionsRequest: + workspace_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'workspaceId', 'style': 'simple', 'explode': False }}) + + + + +@dataclasses.dataclass +class ListDestinationDefinitionsResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + definitions_response: Optional[models_definitionsresponse.DefinitionsResponse] = dataclasses.field(default=None) + r"""Successful operation""" + + diff --git a/src/airbyte_api/api/listsourcedefinitions.py b/src/airbyte_api/api/listsourcedefinitions.py new file mode 100644 index 00000000..0a1c5275 --- /dev/null +++ b/src/airbyte_api/api/listsourcedefinitions.py @@ -0,0 +1,28 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import definitionsresponse as models_definitionsresponse +from typing import Optional + + +@dataclasses.dataclass +class ListSourceDefinitionsRequest: + workspace_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'workspaceId', 'style': 'simple', 'explode': False }}) + + + + +@dataclasses.dataclass +class ListSourceDefinitionsResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + definitions_response: Optional[models_definitionsresponse.DefinitionsResponse] = dataclasses.field(default=None) + r"""Successful operation""" + + diff --git a/src/airbyte_api/api/listtags.py b/src/airbyte_api/api/listtags.py new file mode 100644 index 00000000..755159eb --- /dev/null +++ b/src/airbyte_api/api/listtags.py @@ -0,0 +1,28 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import tagsresponse as models_tagsresponse +from typing import List, Optional + + +@dataclasses.dataclass +class ListTagsRequest: + workspace_ids: Optional[List[str]] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'workspaceIds', 'style': 'form', 'explode': True }}) + + + + +@dataclasses.dataclass +class ListTagsResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + tags_response: Optional[models_tagsresponse.TagsResponse] = dataclasses.field(default=None) + r"""List Tags.""" + + diff --git a/src/airbyte_api/api/updatedeclarativesourcedefinition.py b/src/airbyte_api/api/updatedeclarativesourcedefinition.py new file mode 100644 index 00000000..d3edd045 --- /dev/null +++ b/src/airbyte_api/api/updatedeclarativesourcedefinition.py @@ -0,0 +1,31 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import declarativesourcedefinitionresponse as models_declarativesourcedefinitionresponse +from ..models import updatedeclarativesourcedefinitionrequest as models_updatedeclarativesourcedefinitionrequest +from typing import Optional + + +@dataclasses.dataclass +class UpdateDeclarativeSourceDefinitionRequest: + update_declarative_source_definition_request: models_updatedeclarativesourcedefinitionrequest.UpdateDeclarativeSourceDefinitionRequest = dataclasses.field(metadata={'request': { 'media_type': 'application/json' }}) + definition_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'definitionId', 'style': 'simple', 'explode': False }}) + workspace_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'workspaceId', 'style': 'simple', 'explode': False }}) + + + + +@dataclasses.dataclass +class UpdateDeclarativeSourceDefinitionResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + declarative_source_definition_response: Optional[models_declarativesourcedefinitionresponse.DeclarativeSourceDefinitionResponse] = dataclasses.field(default=None) + r"""Success""" + + diff --git a/src/airbyte_api/api/updatedestinationdefinition.py b/src/airbyte_api/api/updatedestinationdefinition.py new file mode 100644 index 00000000..20466939 --- /dev/null +++ b/src/airbyte_api/api/updatedestinationdefinition.py @@ -0,0 +1,31 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import definitionresponse as models_definitionresponse +from ..models import updatedefinitionrequest as models_updatedefinitionrequest +from typing import Optional + + +@dataclasses.dataclass +class UpdateDestinationDefinitionRequest: + update_definition_request: models_updatedefinitionrequest.UpdateDefinitionRequest = dataclasses.field(metadata={'request': { 'media_type': 'application/json' }}) + definition_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'definitionId', 'style': 'simple', 'explode': False }}) + workspace_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'workspaceId', 'style': 'simple', 'explode': False }}) + + + + +@dataclasses.dataclass +class UpdateDestinationDefinitionResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + definition_response: Optional[models_definitionresponse.DefinitionResponse] = dataclasses.field(default=None) + r"""Success""" + + diff --git a/src/airbyte_api/api/updatesourcedefinition.py b/src/airbyte_api/api/updatesourcedefinition.py new file mode 100644 index 00000000..3c67ef28 --- /dev/null +++ b/src/airbyte_api/api/updatesourcedefinition.py @@ -0,0 +1,31 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import definitionresponse as models_definitionresponse +from ..models import updatedefinitionrequest as models_updatedefinitionrequest +from typing import Optional + + +@dataclasses.dataclass +class UpdateSourceDefinitionRequest: + update_definition_request: models_updatedefinitionrequest.UpdateDefinitionRequest = dataclasses.field(metadata={'request': { 'media_type': 'application/json' }}) + definition_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'definitionId', 'style': 'simple', 'explode': False }}) + workspace_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'workspaceId', 'style': 'simple', 'explode': False }}) + + + + +@dataclasses.dataclass +class UpdateSourceDefinitionResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + definition_response: Optional[models_definitionresponse.DefinitionResponse] = dataclasses.field(default=None) + r"""Success""" + + diff --git a/src/airbyte_api/api/updatetag.py b/src/airbyte_api/api/updatetag.py new file mode 100644 index 00000000..22ff5983 --- /dev/null +++ b/src/airbyte_api/api/updatetag.py @@ -0,0 +1,30 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import requests as requests_http +from ..models import tagpatchrequest as models_tagpatchrequest +from ..models import tagresponse as models_tagresponse +from typing import Optional + + +@dataclasses.dataclass +class UpdateTagRequest: + tag_patch_request: models_tagpatchrequest.TagPatchRequest = dataclasses.field(metadata={'request': { 'media_type': 'application/json' }}) + tag_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'tagId', 'style': 'simple', 'explode': False }}) + + + + +@dataclasses.dataclass +class UpdateTagResponse: + content_type: str = dataclasses.field() + r"""HTTP response content type for this operation""" + status_code: int = dataclasses.field() + r"""HTTP response status code for this operation""" + raw_response: requests_http.Response = dataclasses.field() + r"""Raw HTTP response; suitable for custom response parsing""" + tag_response: Optional[models_tagresponse.TagResponse] = dataclasses.field(default=None) + r"""Successful operation""" + + diff --git a/src/airbyte_api/connections.py b/src/airbyte_api/connections.py index 0ecc7a2d..08f9fc3e 100644 --- a/src/airbyte_api/connections.py +++ b/src/airbyte_api/connections.py @@ -65,7 +65,9 @@ def create_connection(self, request: models.ConnectionCreateRequest) -> api.Crea else: content_type = http_res.headers.get('Content-Type') raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: + elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and 
http_res.status_code < 600: raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) else: raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) @@ -114,7 +116,9 @@ def delete_connection(self, request: api.DeleteConnectionRequest) -> api.DeleteC if http_res.status_code == 204: pass - elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) else: raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) @@ -169,7 +173,9 @@ def get_connection(self, request: api.GetConnectionRequest) -> api.GetConnection else: content_type = http_res.headers.get('Content-Type') raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) else: raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) @@ -225,7 +231,9 @@ def list_connections(self, request: api.ListConnectionsRequest) -> api.ListConne else: content_type = http_res.headers.get('Content-Type') raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) else: raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) @@ -285,7 +293,9 @@ def patch_connection(self, request: api.PatchConnectionRequest) -> api.PatchConn else: content_type = http_res.headers.get('Content-Type') raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + 
raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) else: raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) diff --git a/src/airbyte_api/declarativesourcedefinitions.py b/src/airbyte_api/declarativesourcedefinitions.py new file mode 100644 index 00000000..1d7f919b --- /dev/null +++ b/src/airbyte_api/declarativesourcedefinitions.py @@ -0,0 +1,311 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +import requests as requests_http +from .sdkconfiguration import SDKConfiguration +from airbyte_api import api, errors, models, utils +from airbyte_api._hooks import AfterErrorContext, AfterSuccessContext, BeforeRequestContext, HookContext +from typing import Optional + +class DeclarativeSourceDefinitions: + sdk_configuration: SDKConfiguration + + def __init__(self, sdk_config: SDKConfiguration) -> None: + self.sdk_configuration = sdk_config + + + + def create_declarative_source_definition(self, request: api.CreateDeclarativeSourceDefinitionRequest) -> api.CreateDeclarativeSourceDefinitionResponse: + r"""Create a declarative source definition.""" + hook_ctx = HookContext(operation_id='createDeclarativeSourceDefinition', oauth2_scopes=[], security_source=self.sdk_configuration.security) + base_url = utils.template_url(*self.sdk_configuration.get_server_details()) + + url = utils.generate_url(base_url, '/workspaces/{workspaceId}/definitions/declarative_sources', request) + + if callable(self.sdk_configuration.security): + headers, query_params = utils.get_security(self.sdk_configuration.security()) + else: + headers, query_params = utils.get_security(self.sdk_configuration.security) + + req_content_type, data, form = utils.serialize_request_body(request, api.CreateDeclarativeSourceDefinitionRequest, "create_declarative_source_definition_request", False, False, 'json') + if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'): + headers['content-type'] = req_content_type + if data is None and form is None: + raise Exception('request body is required') + headers['Accept'] = 'application/json' + headers['user-agent'] = self.sdk_configuration.user_agent + client = self.sdk_configuration.client + + try: + req = client.prepare_request(requests_http.Request('POST', url, params=query_params, data=data, files=form, headers=headers)) + req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) + http_res = client.send(req) + except Exception as e: + _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) + if e is not None: + raise e + + if utils.match_status_codes(['4XX','5XX'], http_res.status_code): + result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) + if e is not None: + raise e + if result is not None: + http_res = result + else: + http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) + + + + res = api.CreateDeclarativeSourceDefinitionResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res) + + if http_res.status_code == 200: + # pylint: disable=no-else-return + if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): + 
out = utils.unmarshal_json(http_res.text, Optional[models.DeclarativeSourceDefinitionResponse]) + res.declarative_source_definition_response = out + else: + content_type = http_res.headers.get('Content-Type') + raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + else: + raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + + return res + + + + def delete_declarative_source_definition(self, request: api.DeleteDeclarativeSourceDefinitionRequest) -> api.DeleteDeclarativeSourceDefinitionResponse: + r"""Delete a declarative source definition.""" + hook_ctx = HookContext(operation_id='deleteDeclarativeSourceDefinition', oauth2_scopes=[], security_source=self.sdk_configuration.security) + base_url = utils.template_url(*self.sdk_configuration.get_server_details()) + + url = utils.generate_url(base_url, '/workspaces/{workspaceId}/definitions/declarative_sources/{definitionId}', request) + + if callable(self.sdk_configuration.security): + headers, query_params = utils.get_security(self.sdk_configuration.security()) + else: + headers, query_params = utils.get_security(self.sdk_configuration.security) + + headers['Accept'] = 'application/json' + headers['user-agent'] = self.sdk_configuration.user_agent + client = self.sdk_configuration.client + + try: + req = client.prepare_request(requests_http.Request('DELETE', url, params=query_params, headers=headers)) + req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) + http_res = client.send(req) + except Exception as e: + _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) + if e is not None: + raise e + + if utils.match_status_codes(['403','404','4XX','5XX'], http_res.status_code): + result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) + if e is not None: + raise e + if result is not None: + http_res = result + else: + http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) + + + + res = api.DeleteDeclarativeSourceDefinitionResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res) + + if http_res.status_code == 200: + # pylint: disable=no-else-return + if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): + out = utils.unmarshal_json(http_res.text, Optional[models.DeclarativeSourceDefinitionResponse]) + res.declarative_source_definition_response = out + else: + content_type = http_res.headers.get('Content-Type') + raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + else: + raise errors.SDKError('unknown status 
code received', http_res.status_code, http_res.text, http_res) + + return res + + + + def get_declarative_source_definition(self, request: api.GetDeclarativeSourceDefinitionRequest) -> api.GetDeclarativeSourceDefinitionResponse: + r"""Get declarative source definition details.""" + hook_ctx = HookContext(operation_id='getDeclarativeSourceDefinition', oauth2_scopes=[], security_source=self.sdk_configuration.security) + base_url = utils.template_url(*self.sdk_configuration.get_server_details()) + + url = utils.generate_url(base_url, '/workspaces/{workspaceId}/definitions/declarative_sources/{definitionId}', request) + + if callable(self.sdk_configuration.security): + headers, query_params = utils.get_security(self.sdk_configuration.security()) + else: + headers, query_params = utils.get_security(self.sdk_configuration.security) + + headers['Accept'] = 'application/json' + headers['user-agent'] = self.sdk_configuration.user_agent + client = self.sdk_configuration.client + + try: + req = client.prepare_request(requests_http.Request('GET', url, params=query_params, headers=headers)) + req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) + http_res = client.send(req) + except Exception as e: + _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) + if e is not None: + raise e + + if utils.match_status_codes(['403','404','4XX','5XX'], http_res.status_code): + result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) + if e is not None: + raise e + if result is not None: + http_res = result + else: + http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) + + + + res = api.GetDeclarativeSourceDefinitionResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res) + + if http_res.status_code == 200: + # pylint: disable=no-else-return + if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): + out = utils.unmarshal_json(http_res.text, Optional[models.DeclarativeSourceDefinitionResponse]) + res.declarative_source_definition_response = out + else: + content_type = http_res.headers.get('Content-Type') + raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + else: + raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + + return res + + + + def list_declarative_source_definitions(self, request: api.ListDeclarativeSourceDefinitionsRequest) -> api.ListDeclarativeSourceDefinitionsResponse: + r"""List declarative source definitions.""" + hook_ctx = HookContext(operation_id='listDeclarativeSourceDefinitions', oauth2_scopes=[], security_source=self.sdk_configuration.security) + base_url = utils.template_url(*self.sdk_configuration.get_server_details()) + + url = utils.generate_url(base_url, '/workspaces/{workspaceId}/definitions/declarative_sources', request) + + if callable(self.sdk_configuration.security): + headers, query_params = 
utils.get_security(self.sdk_configuration.security()) + else: + headers, query_params = utils.get_security(self.sdk_configuration.security) + + headers['Accept'] = 'application/json' + headers['user-agent'] = self.sdk_configuration.user_agent + client = self.sdk_configuration.client + + try: + req = client.prepare_request(requests_http.Request('GET', url, params=query_params, headers=headers)) + req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) + http_res = client.send(req) + except Exception as e: + _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) + if e is not None: + raise e + + if utils.match_status_codes(['403','404','4XX','5XX'], http_res.status_code): + result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) + if e is not None: + raise e + if result is not None: + http_res = result + else: + http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) + + + + res = api.ListDeclarativeSourceDefinitionsResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res) + + if http_res.status_code == 200: + # pylint: disable=no-else-return + if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): + out = utils.unmarshal_json(http_res.text, Optional[models.DeclarativeSourceDefinitionsResponse]) + res.declarative_source_definitions_response = out + else: + content_type = http_res.headers.get('Content-Type') + raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + else: + raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + + return res + + + + def update_declarative_source_definition(self, request: api.UpdateDeclarativeSourceDefinitionRequest) -> api.UpdateDeclarativeSourceDefinitionResponse: + r"""Update declarative source definition details.""" + hook_ctx = HookContext(operation_id='updateDeclarativeSourceDefinition', oauth2_scopes=[], security_source=self.sdk_configuration.security) + base_url = utils.template_url(*self.sdk_configuration.get_server_details()) + + url = utils.generate_url(base_url, '/workspaces/{workspaceId}/definitions/declarative_sources/{definitionId}', request) + + if callable(self.sdk_configuration.security): + headers, query_params = utils.get_security(self.sdk_configuration.security()) + else: + headers, query_params = utils.get_security(self.sdk_configuration.security) + + req_content_type, data, form = utils.serialize_request_body(request, api.UpdateDeclarativeSourceDefinitionRequest, "update_declarative_source_definition_request", False, False, 'json') + if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'): + headers['content-type'] = req_content_type + if data is None and form is None: + raise Exception('request body is required') + headers['Accept'] = 'application/json' + headers['user-agent'] = self.sdk_configuration.user_agent + client = 
self.sdk_configuration.client + + try: + req = client.prepare_request(requests_http.Request('PUT', url, params=query_params, data=data, files=form, headers=headers)) + req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) + http_res = client.send(req) + except Exception as e: + _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) + if e is not None: + raise e + + if utils.match_status_codes(['403','404','4XX','5XX'], http_res.status_code): + result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) + if e is not None: + raise e + if result is not None: + http_res = result + else: + http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) + + + + res = api.UpdateDeclarativeSourceDefinitionResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res) + + if http_res.status_code == 200: + # pylint: disable=no-else-return + if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): + out = utils.unmarshal_json(http_res.text, Optional[models.DeclarativeSourceDefinitionResponse]) + res.declarative_source_definition_response = out + else: + content_type = http_res.headers.get('Content-Type') + raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + else: + raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + + return res + + + diff --git a/src/airbyte_api/destinationdefinitions.py b/src/airbyte_api/destinationdefinitions.py new file mode 100644 index 00000000..7e5e3720 --- /dev/null +++ b/src/airbyte_api/destinationdefinitions.py @@ -0,0 +1,311 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
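
update_declarative_source_definition serializes its body from the update_declarative_source_definition_request field and raises when that body is missing, so the wrapped model is mandatory. A sketch under the same assumptions as above; the body model's fields are not visible in this diff and are left as a placeholder:

# PUT /workspaces/{workspaceId}/definitions/declarative_sources/{definitionId}
res = s.declarative_source_definitions.update_declarative_source_definition(
    request=api.UpdateDeclarativeSourceDefinitionRequest(
        workspace_id='<workspace-id>',
        definition_id='<definition-id>',
        update_declarative_source_definition_request=models.UpdateDeclarativeSourceDefinitionRequest(
            # required body fields per docs/api/updatedeclarativesourcedefinitionrequest.md
        ),
    ))
print(res.declarative_source_definition_response)
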
DO NOT EDIT.""" + +import requests as requests_http +from .sdkconfiguration import SDKConfiguration +from airbyte_api import api, errors, models, utils +from airbyte_api._hooks import AfterErrorContext, AfterSuccessContext, BeforeRequestContext, HookContext +from typing import Optional + +class DestinationDefinitions: + sdk_configuration: SDKConfiguration + + def __init__(self, sdk_config: SDKConfiguration) -> None: + self.sdk_configuration = sdk_config + + + + def create_destination_definition(self, request: api.CreateDestinationDefinitionRequest) -> api.CreateDestinationDefinitionResponse: + r"""Create a destination definition.""" + hook_ctx = HookContext(operation_id='createDestinationDefinition', oauth2_scopes=[], security_source=self.sdk_configuration.security) + base_url = utils.template_url(*self.sdk_configuration.get_server_details()) + + url = utils.generate_url(base_url, '/workspaces/{workspaceId}/definitions/destinations', request) + + if callable(self.sdk_configuration.security): + headers, query_params = utils.get_security(self.sdk_configuration.security()) + else: + headers, query_params = utils.get_security(self.sdk_configuration.security) + + req_content_type, data, form = utils.serialize_request_body(request, api.CreateDestinationDefinitionRequest, "create_definition_request", False, False, 'json') + if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'): + headers['content-type'] = req_content_type + if data is None and form is None: + raise Exception('request body is required') + headers['Accept'] = 'application/json' + headers['user-agent'] = self.sdk_configuration.user_agent + client = self.sdk_configuration.client + + try: + req = client.prepare_request(requests_http.Request('POST', url, params=query_params, data=data, files=form, headers=headers)) + req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) + http_res = client.send(req) + except Exception as e: + _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) + if e is not None: + raise e + + if utils.match_status_codes(['4XX','5XX'], http_res.status_code): + result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) + if e is not None: + raise e + if result is not None: + http_res = result + else: + http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) + + + + res = api.CreateDestinationDefinitionResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res) + + if http_res.status_code == 200: + # pylint: disable=no-else-return + if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): + out = utils.unmarshal_json(http_res.text, Optional[models.DefinitionResponse]) + res.definition_response = out + else: + content_type = http_res.headers.get('Content-Type') + raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + else: + raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + + 
return res + + + + def delete_destination_definition(self, request: api.DeleteDestinationDefinitionRequest) -> api.DeleteDestinationDefinitionResponse: + r"""Delete a destination definition.""" + hook_ctx = HookContext(operation_id='deleteDestinationDefinition', oauth2_scopes=[], security_source=self.sdk_configuration.security) + base_url = utils.template_url(*self.sdk_configuration.get_server_details()) + + url = utils.generate_url(base_url, '/workspaces/{workspaceId}/definitions/destinations/{definitionId}', request) + + if callable(self.sdk_configuration.security): + headers, query_params = utils.get_security(self.sdk_configuration.security()) + else: + headers, query_params = utils.get_security(self.sdk_configuration.security) + + headers['Accept'] = 'application/json' + headers['user-agent'] = self.sdk_configuration.user_agent + client = self.sdk_configuration.client + + try: + req = client.prepare_request(requests_http.Request('DELETE', url, params=query_params, headers=headers)) + req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) + http_res = client.send(req) + except Exception as e: + _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) + if e is not None: + raise e + + if utils.match_status_codes(['403','404','4XX','5XX'], http_res.status_code): + result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) + if e is not None: + raise e + if result is not None: + http_res = result + else: + http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) + + + + res = api.DeleteDestinationDefinitionResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res) + + if http_res.status_code == 200: + # pylint: disable=no-else-return + if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): + out = utils.unmarshal_json(http_res.text, Optional[models.DefinitionResponse]) + res.definition_response = out + else: + content_type = http_res.headers.get('Content-Type') + raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + else: + raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + + return res + + + + def get_destination_definition(self, request: api.GetDestinationDefinitionRequest) -> api.GetDestinationDefinitionResponse: + r"""Get destination definition details.""" + hook_ctx = HookContext(operation_id='getDestinationDefinition', oauth2_scopes=[], security_source=self.sdk_configuration.security) + base_url = utils.template_url(*self.sdk_configuration.get_server_details()) + + url = utils.generate_url(base_url, '/workspaces/{workspaceId}/definitions/destinations/{definitionId}', request) + + if callable(self.sdk_configuration.security): + headers, query_params = utils.get_security(self.sdk_configuration.security()) + else: + headers, query_params = utils.get_security(self.sdk_configuration.security) + + headers['Accept'] = 'application/json' + 
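
A sketch of the create and delete operations above, again with the hypothetical client s. The create body is wrapped under create_definition_request (per the serialize_request_body call), and models.CreateDefinitionRequest is assumed to be the class exported by models/createdefinitionrequest.py:

# POST /workspaces/{workspaceId}/definitions/destinations
res = s.destination_definitions.create_destination_definition(
    request=api.CreateDestinationDefinitionRequest(
        workspace_id='<workspace-id>',
        create_definition_request=models.CreateDefinitionRequest(
            # required body fields per docs/api/createdestinationdefinitionrequest.md
        ),
    ))
print(res.definition_response)

# DELETE /workspaces/{workspaceId}/definitions/destinations/{definitionId}
s.destination_definitions.delete_destination_definition(
    request=api.DeleteDestinationDefinitionRequest(
        workspace_id='<workspace-id>', definition_id='<definition-id>'))
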
headers['user-agent'] = self.sdk_configuration.user_agent + client = self.sdk_configuration.client + + try: + req = client.prepare_request(requests_http.Request('GET', url, params=query_params, headers=headers)) + req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) + http_res = client.send(req) + except Exception as e: + _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) + if e is not None: + raise e + + if utils.match_status_codes(['403','404','4XX','5XX'], http_res.status_code): + result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) + if e is not None: + raise e + if result is not None: + http_res = result + else: + http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) + + + + res = api.GetDestinationDefinitionResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res) + + if http_res.status_code == 200: + # pylint: disable=no-else-return + if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): + out = utils.unmarshal_json(http_res.text, Optional[models.DefinitionResponse]) + res.definition_response = out + else: + content_type = http_res.headers.get('Content-Type') + raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + else: + raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + + return res + + + + def list_destination_definitions(self, request: api.ListDestinationDefinitionsRequest) -> api.ListDestinationDefinitionsResponse: + r"""List destination definitions.""" + hook_ctx = HookContext(operation_id='listDestinationDefinitions', oauth2_scopes=[], security_source=self.sdk_configuration.security) + base_url = utils.template_url(*self.sdk_configuration.get_server_details()) + + url = utils.generate_url(base_url, '/workspaces/{workspaceId}/definitions/destinations', request) + + if callable(self.sdk_configuration.security): + headers, query_params = utils.get_security(self.sdk_configuration.security()) + else: + headers, query_params = utils.get_security(self.sdk_configuration.security) + + headers['Accept'] = 'application/json' + headers['user-agent'] = self.sdk_configuration.user_agent + client = self.sdk_configuration.client + + try: + req = client.prepare_request(requests_http.Request('GET', url, params=query_params, headers=headers)) + req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) + http_res = client.send(req) + except Exception as e: + _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) + if e is not None: + raise e + + if utils.match_status_codes(['403','404','4XX','5XX'], http_res.status_code): + result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) + if e is not None: + raise e + if result is not None: + http_res = result + else: + http_res = 
self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) + + + + res = api.ListDestinationDefinitionsResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res) + + if http_res.status_code == 200: + # pylint: disable=no-else-return + if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): + out = utils.unmarshal_json(http_res.text, Optional[models.DefinitionsResponse]) + res.definitions_response = out + else: + content_type = http_res.headers.get('Content-Type') + raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + else: + raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + + return res + + + + def update_destination_definition(self, request: api.UpdateDestinationDefinitionRequest) -> api.UpdateDestinationDefinitionResponse: + r"""Update destination definition details.""" + hook_ctx = HookContext(operation_id='updateDestinationDefinition', oauth2_scopes=[], security_source=self.sdk_configuration.security) + base_url = utils.template_url(*self.sdk_configuration.get_server_details()) + + url = utils.generate_url(base_url, '/workspaces/{workspaceId}/definitions/destinations/{definitionId}', request) + + if callable(self.sdk_configuration.security): + headers, query_params = utils.get_security(self.sdk_configuration.security()) + else: + headers, query_params = utils.get_security(self.sdk_configuration.security) + + req_content_type, data, form = utils.serialize_request_body(request, api.UpdateDestinationDefinitionRequest, "update_definition_request", False, False, 'json') + if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'): + headers['content-type'] = req_content_type + if data is None and form is None: + raise Exception('request body is required') + headers['Accept'] = 'application/json' + headers['user-agent'] = self.sdk_configuration.user_agent + client = self.sdk_configuration.client + + try: + req = client.prepare_request(requests_http.Request('PUT', url, params=query_params, data=data, files=form, headers=headers)) + req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req) + http_res = client.send(req) + except Exception as e: + _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e) + if e is not None: + raise e + + if utils.match_status_codes(['403','404','4XX','5XX'], http_res.status_code): + result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None) + if e is not None: + raise e + if result is not None: + http_res = result + else: + http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res) + + + + res = api.UpdateDestinationDefinitionResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res) + + if http_res.status_code == 200: + # pylint: disable=no-else-return + if 
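
Listing follows the same pattern; the payload lands on res.definitions_response as an Optional[models.DefinitionsResponse]:

# GET /workspaces/{workspaceId}/definitions/destinations
res = s.destination_definitions.list_destination_definitions(
    request=api.ListDestinationDefinitionsRequest(workspace_id='<workspace-id>'))
if res.definitions_response is not None:
    print(res.definitions_response)
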
utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'): + out = utils.unmarshal_json(http_res.text, Optional[models.DefinitionResponse]) + res.definition_response = out + else: + content_type = http_res.headers.get('Content-Type') + raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + else: + raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) + + return res + + + diff --git a/src/airbyte_api/destinations.py b/src/airbyte_api/destinations.py index a85305ad..60be0a64 100644 --- a/src/airbyte_api/destinations.py +++ b/src/airbyte_api/destinations.py @@ -65,7 +65,9 @@ def create_destination(self, request: Optional[models.DestinationCreateRequest] else: content_type = http_res.headers.get('Content-Type') raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: + elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) else: raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) @@ -114,7 +116,9 @@ def delete_destination(self, request: api.DeleteDestinationRequest) -> api.Delet if http_res.status_code == 204: pass - elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) else: raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) @@ -135,6 +139,7 @@ def get_destination(self, request: api.GetDestinationRequest) -> api.GetDestinat else: headers, query_params = utils.get_security(self.sdk_configuration.security) + query_params = { **utils.get_query_params(request), **query_params } headers['Accept'] = 'application/json' headers['user-agent'] = self.sdk_configuration.user_agent client = self.sdk_configuration.client @@ -169,7 +174,9 @@ def get_destination(self, request: api.GetDestinationRequest) -> api.GetDestinat else: content_type = http_res.headers.get('Content-Type') raise errors.SDKError(f'unknown content-type received: {content_type}', 
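
And the corresponding update sketch; the body is wrapped under update_definition_request, with models.UpdateDefinitionRequest assumed to be the exported body class:

# PUT /workspaces/{workspaceId}/definitions/destinations/{definitionId}
res = s.destination_definitions.update_destination_definition(
    request=api.UpdateDestinationDefinitionRequest(
        workspace_id='<workspace-id>',
        definition_id='<definition-id>',
        update_definition_request=models.UpdateDefinitionRequest(
            # required body fields per docs/api/updatedestinationdefinitionrequest.md
        ),
    ))
print(res.definition_response)
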
http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) else: raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) @@ -225,7 +232,9 @@ def list_destinations(self, request: api.ListDestinationsRequest) -> api.ListDes else: content_type = http_res.headers.get('Content-Type') raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) else: raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) @@ -283,7 +292,9 @@ def patch_destination(self, request: api.PatchDestinationRequest) -> api.PatchDe else: content_type = http_res.headers.get('Content-Type') raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) else: raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) @@ -341,7 +352,9 @@ def put_destination(self, request: api.PutDestinationRequest) -> api.PutDestinat else: content_type = http_res.headers.get('Content-Type') raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) else: raise errors.SDKError('unknown status code received', 
http_res.status_code, http_res.text, http_res) diff --git a/src/airbyte_api/health.py b/src/airbyte_api/health.py index 7b680fe0..0994f2b6 100644 --- a/src/airbyte_api/health.py +++ b/src/airbyte_api/health.py @@ -50,7 +50,9 @@ def get_health_check(self) -> api.GetHealthCheckResponse: if http_res.status_code == 200: pass - elif http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: + elif http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) else: raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) diff --git a/src/airbyte_api/jobs.py b/src/airbyte_api/jobs.py index ca0ae1d7..9fe60c69 100644 --- a/src/airbyte_api/jobs.py +++ b/src/airbyte_api/jobs.py @@ -60,7 +60,9 @@ def cancel_job(self, request: api.CancelJobRequest) -> api.CancelJobResponse: else: content_type = http_res.headers.get('Content-Type') raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) else: raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) @@ -120,7 +122,9 @@ def create_job(self, request: models.JobCreateRequest) -> api.CreateJobResponse: else: content_type = http_res.headers.get('Content-Type') raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: + elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) else: raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) @@ -175,7 +179,9 @@ def get_job(self, request: api.GetJobRequest) -> api.GetJobResponse: else: content_type = http_res.headers.get('Content-Type') raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: + elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API 
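
The destinations.py and health.py hunks above split the combined 4XX/5XX elif into two branches; both arms still raise errors.SDKError, so caller-visible behavior is unchanged. A defensive caller sketch (client s as assumed earlier; s.health and the status_code attribute on SDKError are assumptions consistent with the constructor calls shown above):

from airbyte_api import errors

try:
    s.health.get_health_check()
except errors.SDKError as err:
    # 4XX is a client-side problem; 5XX is server-side and may be transient.
    transient = err.status_code >= 500
    print(f'API error {err.status_code}; transient={transient}')
    raise
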
error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) else: raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) @@ -231,7 +237,9 @@ def list_jobs(self, request: api.ListJobsRequest) -> api.ListJobsResponse: else: content_type = http_res.headers.get('Content-Type') raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res) - elif http_res.status_code == 403 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600: + elif http_res.status_code == 403 or http_res.status_code >= 400 and http_res.status_code < 500: + raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) + elif http_res.status_code >= 500 and http_res.status_code < 600: raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res) else: raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res) diff --git a/src/airbyte_api/models/__init__.py b/src/airbyte_api/models/__init__.py index 9c3442cd..6833625b 100644 --- a/src/airbyte_api/models/__init__.py +++ b/src/airbyte_api/models/__init__.py @@ -16,13 +16,21 @@ from .connectionsresponse import * from .connectionstatusenum import * from .connectionsyncmodeenum import * +from .createdeclarativesourcedefinitionrequest import * +from .createdefinitionrequest import * +from .declarativesourcedefinitionresponse import * +from .declarativesourcedefinitionsresponse import * +from .definitionresponse import * +from .definitionsresponse import * from .destination_astra import * from .destination_aws_datalake import * from .destination_azure_blob_storage import * from .destination_bigquery import * from .destination_clickhouse import * from .destination_convex import * +from .destination_customer_io import * from .destination_databricks import * +from .destination_deepset import * from .destination_dev_null import * from .destination_duckdb import * from .destination_dynamodb import * @@ -31,11 +39,12 @@ from .destination_firestore import * from .destination_gcs import * from .destination_google_sheets import * -from .destination_iceberg import * +from .destination_hubspot import * from .destination_milvus import * from .destination_mongodb import * from .destination_motherduck import * from .destination_mssql import * +from .destination_mssql_v2 import * from .destination_mysql import * from .destination_oracle import * from .destination_pgvector import * @@ -46,10 +55,12 @@ from .destination_redis import * from .destination_redshift import * from .destination_s3 import * -from .destination_s3_glue import * +from .destination_s3_data_lake import * +from .destination_salesforce import * from .destination_sftp_json import * from .destination_snowflake import * from .destination_snowflake_cortex import * +from .destination_surrealdb import * from .destination_teradata import * from .destination_timeplus import * from .destination_typesense import * @@ -63,10 +74,10 @@ from .destinationresponse import * from .destinationsresponse import * from .drift import * +from .emailnotificationconfig import * +from .encryptionmapperalgorithm import * from .facebook_marketing import * from .gcs import * -from .geographyenum import * -from .geographyenumnodefault 
import * from .github import * from .gitlab import * from .google_ads import * @@ -77,12 +88,13 @@ from .hubspot import * from .initiateoauthrequest import * from .instagram import * -from .intercom import * from .jobcreaterequest import * from .jobresponse import * from .jobsresponse import * from .jobstatusenum import * +from .jobtype import * from .jobtypeenum import * +from .jobtyperesourcelimit import * from .lever_hiring import * from .linkedin_ads import * from .mailchimp import * @@ -95,9 +107,11 @@ from .namespacedefinitionenumnodefault import * from .nonbreakingschemaupdatesbehaviorenum import * from .nonbreakingschemaupdatesbehaviorenumnodefault import * +from .notificationconfig import * +from .notificationsconfig import * from .notion import * from .oauthactornames import * -from .oauthcredentialsconfiguration import * +from .organizationoauthcredentialsrequest import * from .organizationresponse import * from .organizationsresponse import * from .permissioncreaterequest import * @@ -110,18 +124,23 @@ from .pinterest import * from .publicpermissiontype import * from .rd_station_marketing import * +from .resourcerequirements import * +from .rowfilteringoperation import * +from .rowfilteringoperationtype import * from .salesforce import * from .scheduletypeenum import * from .scheduletypewithbasicenum import * from .schemebasicauth import * from .schemeclientcredentials import * +from .scopedresourcerequirements import * from .security import * from .selectedfieldinfo import * +from .sharepoint_enterprise import * from .shopify import * from .slack import * from .smartsheets import * from .snapchat_marketing import * -from .snowflake import * +from .source_100ms import * from .source_7shifts import * from .source_activecampaign import * from .source_agilecrm import * @@ -131,6 +150,8 @@ from .source_airtable import * from .source_akeneo import * from .source_algolia import * +from .source_alpaca_broker_api import * +from .source_alpha_vantage import * from .source_amazon_ads import * from .source_amazon_seller_partner import * from .source_amazon_sqs import * @@ -140,13 +161,18 @@ from .source_appfigures import * from .source_appfollow import * from .source_apple_search_ads import * +from .source_appsflyer import * from .source_apptivo import * from .source_asana import * from .source_ashby import * +from .source_assemblyai import * from .source_auth0 import * +from .source_aviationstack import * +from .source_awin_advertiser import * from .source_aws_cloudtrail import * from .source_azure_blob_storage import * from .source_azure_table import * +from .source_babelforce import * from .source_bamboo_hr import * from .source_basecamp import * from .source_beamer import * @@ -155,9 +181,12 @@ from .source_bing_ads import * from .source_bitly import * from .source_blogger import * +from .source_bluetally import * +from .source_boldsign import * from .source_box import * from .source_braintree import * from .source_braze import * +from .source_breezometer import * from .source_breezy_hr import * from .source_brevo import * from .source_brex import * @@ -172,6 +201,7 @@ from .source_campayn import * from .source_canny import * from .source_capsule_crm import * +from .source_captain_data import * from .source_care_quality_commission import * from .source_cart import * from .source_castor_edc import * @@ -180,10 +210,12 @@ from .source_chargedesk import * from .source_chargify import * from .source_chartmogul import * +from .source_churnkey import * from .source_cimis import * from 
.source_cin7 import * from .source_circa import * from .source_circleci import * +from .source_cisco_meraki import * from .source_clarif_ai import * from .source_clazar import * from .source_clickhouse import * @@ -196,6 +228,7 @@ from .source_coda import * from .source_codefresh import * from .source_coin_api import * +from .source_coingecko_coins import * from .source_coinmarketcap import * from .source_concord import * from .source_configcat import * @@ -203,35 +236,48 @@ from .source_convertkit import * from .source_convex import * from .source_copper import * +from .source_couchbase import * from .source_countercyclical import * from .source_customer_io import * +from .source_customerly import * from .source_datadog import * from .source_datascope import * from .source_dbt import * from .source_delighted import * from .source_deputy import * +from .source_ding_connect import * from .source_dixa import * from .source_dockerhub import * +from .source_docuseal import * +from .source_dolibarr import * from .source_dremio import * from .source_drift import * from .source_drip import * from .source_dropbox_sign import * +from .source_dwolla import * from .source_dynamodb import * from .source_e_conomic import * from .source_easypost import * from .source_easypromos import * +from .source_ebay_finance import * +from .source_ebay_fulfillment import * from .source_elasticemail import * +from .source_elasticsearch import * from .source_emailoctopus import * from .source_employment_hero import * from .source_encharge import * from .source_eventbrite import * from .source_eventee import * from .source_eventzilla import * +from .source_everhour import * from .source_exchange_rates import * from .source_ezofficeinventory import * from .source_facebook_marketing import * +from .source_facebook_pages import * from .source_factorial import * from .source_faker import * +from .source_fastbill import * +from .source_fastly import * from .source_fauna import * from .source_file import * from .source_fillout import * @@ -257,10 +303,12 @@ from .source_freshservice import * from .source_front import * from .source_fulcrum import * +from .source_fullstory import * from .source_gainsight_px import * from .source_gcs import * from .source_getgist import * from .source_getlago import * +from .source_giphy import * from .source_gitbook import * from .source_github import * from .source_gitlab import * @@ -269,6 +317,7 @@ from .source_gnews import * from .source_gocardless import * from .source_goldcast import * +from .source_gologin import * from .source_gong import * from .source_google_ads import * from .source_google_analytics_data_api import * @@ -284,35 +333,48 @@ from .source_google_webfonts import * from .source_gorgias import * from .source_greenhouse import * +from .source_greythr import * from .source_gridly import * from .source_guru import * from .source_gutendex import * from .source_hardcoded_records import * +from .source_harness import * from .source_harvest import * from .source_height import * +from .source_hellobaton import * +from .source_help_scout import * from .source_hibob import * from .source_high_level import * +from .source_hoorayhr import * from .source_hubplanner import * from .source_hubspot import * +from .source_hugging_face_datasets import * from .source_humanitix import * +from .source_huntr import * from .source_illumina_basespace import * +from .source_imagga import * from .source_incident_io import * from .source_inflowinventory import * +from .source_insightful import * 
from .source_insightly import * from .source_instagram import * from .source_instatus import * from .source_intercom import * +from .source_intruder import * from .source_invoiced import * from .source_invoiceninja import * from .source_ip2whois import * from .source_iterable import * +from .source_jamf_pro import * from .source_jira import * from .source_jobnimbus import * from .source_jotform import * +from .source_judge_me_reviews import * from .source_just_sift import * from .source_justcall import * from .source_k6_cloud import * from .source_katana import * +from .source_keka import * from .source_kisi import * from .source_kissmetrics import * from .source_klarna import * @@ -325,6 +387,7 @@ from .source_less_annoying_crm import * from .source_lever_hiring import * from .source_lightspeed_retail import * +from .source_linear import * from .source_linkedin_ads import * from .source_linkedin_pages import * from .source_linnworks import * @@ -334,6 +397,7 @@ from .source_luma import * from .source_mailchimp import * from .source_mailerlite import * +from .source_mailersend import * from .source_mailgun import * from .source_mailjet_mail import * from .source_mailjet_sms import * @@ -341,7 +405,10 @@ from .source_mailtrap import * from .source_marketo import * from .source_marketstack import * +from .source_mendeley import * from .source_mention import * +from .source_mercado_ads import * +from .source_merge import * from .source_metabase import * from .source_microsoft_dataverse import * from .source_microsoft_entra_id import * @@ -362,9 +429,15 @@ from .source_mysql import * from .source_n8n import * from .source_nasa import * +from .source_navan import * +from .source_nebius_ai import * from .source_netsuite import * +from .source_netsuite_enterprise import * from .source_news_api import * +from .source_newsdata import * from .source_newsdata_io import * +from .source_nexiopay import * +from .source_ninjaone_rmm import * from .source_nocrm import * from .source_northpass_lms import * from .source_notion import * @@ -378,31 +451,41 @@ from .source_onesignal import * from .source_onfleet import * from .source_open_data_dc import * +from .source_open_exchange_rates import * from .source_openaq import * from .source_openfda import * from .source_openweather import * from .source_opinion_stage import * from .source_opsgenie import * +from .source_opuswatch import * from .source_oracle import * +from .source_oracle_enterprise import * from .source_orb import * -from .source_orbit import * from .source_oura import * from .source_outbrain_amplify import * from .source_outreach import * from .source_oveit import * from .source_pabbly_subscriptions_billing import * +from .source_paddle import * +from .source_pagerduty import * from .source_pandadoc import * from .source_paperform import * from .source_papersign import * from .source_pardot import * +from .source_partnerize import * +from .source_partnerstack import * +from .source_payfit import * from .source_paypal_transaction import * from .source_paystack import * from .source_pendo import * from .source_pennylane import * +from .source_perigon import * from .source_persistiq import * from .source_persona import * from .source_pexels_api import * +from .source_phyllo import * from .source_picqer import * +from .source_pingdom import * from .source_pinterest import * from .source_pipedrive import * from .source_pipeliner import * @@ -414,12 +497,14 @@ from .source_pocket import * from .source_pokeapi import * from .source_polygon_stock_api 
import * +from .source_poplar import * from .source_postgres import * from .source_posthog import * from .source_postmarkapp import * from .source_prestashop import * from .source_pretix import * from .source_primetric import * +from .source_printify import * from .source_productboard import * from .source_productive import * from .source_pypi import * @@ -437,10 +522,13 @@ from .source_rentcast import * from .source_repairshopr import * from .source_reply_io import * +from .source_retailexpress_by_maropost import * from .source_retently import * from .source_revenuecat import * from .source_revolut_merchant import * +from .source_ringcentral import * from .source_rki_covid import * +from .source_rocket_chat import * from .source_rocketlane import * from .source_rollbar import * from .source_rootly import * @@ -453,6 +541,7 @@ from .source_salesforce import * from .source_salesloft import * from .source_sap_fieldglass import * +from .source_sap_hana_enterprise import * from .source_savvycal import * from .source_scryfall import * from .source_secoda import * @@ -463,14 +552,21 @@ from .source_sendpulse import * from .source_senseforce import * from .source_sentry import * +from .source_serpstat import * +from .source_service_now import * from .source_sftp import * from .source_sftp_bulk import * +from .source_sharepoint_enterprise import * from .source_sharetribe import * from .source_shippo import * +from .source_shipstation import * from .source_shopify import * +from .source_shopwired import * from .source_shortcut import * from .source_shortio import * +from .source_shutterstock import * from .source_sigma_computing import * +from .source_signnow import * from .source_simfin import * from .source_simplecast import * from .source_simplesat import * @@ -487,6 +583,7 @@ from .source_spacex_api import * from .source_sparkpost import * from .source_split_io import * +from .source_spotify_ads import * from .source_spotlercrm import * from .source_square import * from .source_squarespace import * @@ -498,20 +595,26 @@ from .source_survey_sparrow import * from .source_surveymonkey import * from .source_survicate import * +from .source_svix import * from .source_systeme import * from .source_taboola import * +from .source_tavus import * from .source_teamtailor import * from .source_teamwork import * from .source_tempo import * from .source_testrail import * from .source_the_guardian_api import * from .source_thinkific import * +from .source_thinkific_courses import * +from .source_thrive_learning import * from .source_ticketmaster import * from .source_tickettailor import * from .source_tiktok_marketing import * from .source_timely import * from .source_tinyemail import * +from .source_tmdb import * from .source_todoist import * +from .source_toggl import * from .source_track_pms import * from .source_trello import * from .source_tremendous import * @@ -521,10 +624,12 @@ from .source_twilio import * from .source_twilio_taskrouter import * from .source_twitter import * +from .source_tyntec_sms import * from .source_typeform import * from .source_ubidots import * from .source_unleash import * from .source_uppromote import * +from .source_uptick import * from .source_us_census import * from .source_uservoice import * from .source_vantage import * @@ -535,6 +640,7 @@ from .source_vwo import * from .source_waiteraid import * from .source_wasabi_stats_api import * +from .source_watchmode import * from .source_weatherstack import * from .source_web_scrapper import * from .source_webflow import * @@ 
-544,6 +650,7 @@ from .source_woocommerce import * from .source_wordpress import * from .source_workable import * +from .source_workday import * from .source_workflowmax import * from .source_workramp import * from .source_wrike import * @@ -554,9 +661,12 @@ from .source_yandex_metrica import * from .source_yotpo import * from .source_you_need_a_budget_ynab import * +from .source_younium import * +from .source_yousign import * from .source_youtube_analytics import * from .source_youtube_data import * from .source_zapier_supported_storage import * +from .source_zapsign import * from .source_zendesk_chat import * from .source_zendesk_sunshine import * from .source_zendesk_support import * @@ -583,21 +693,28 @@ from .sourcesresponse import * from .streamconfiguration import * from .streamconfigurations import * +from .streamconfigurations_input import * from .streammappertype import * from .streamproperties import * from .surveymonkey import * +from .tag import * +from .tagcreaterequest import * +from .tagpatchrequest import * +from .tagresponse import * +from .tagsresponse import * from .tiktok_marketing import * from .typeform import * +from .updatedeclarativesourcedefinitionrequest import * +from .updatedefinitionrequest import * from .userresponse import * from .usersresponse import * +from .webhooknotificationconfig import * from .workspacecreaterequest import * from .workspaceoauthcredentialsrequest import * from .workspaceresponse import * from .workspacesresponse import * from .workspaceupdaterequest import * from .youtube_analytics import * -from .zendesk_chat import * from .zendesk_support import * from .zendesk_talk import * -__all__ = ["APIAccessToken","APIEndpoint","APIEndpointPrefix","APIKey","APIKeyAuth","APIKeySecret","APIParameterConfigModel","APIPassword","APIServer","APIToken","AWSEnvironment","AWSRegion","AWSS3Staging","AWSSellerPartnerAccountType","AccessToken","AccessTokenIsRequiredForAuthenticationRequests","AccountNames","ActionReportTime","Activecampaign","ActorTypeEnum","AdAnalyticsReportConfiguration","Agilecrm","Aha","Airbyte","AirbyteAPIConnectionSchedule","Aircall","Airtable","Akeneo","Algolia","Allow","AmazonAds","AmazonSellerPartner","AmazonSqs","Amplitude","AndGroup","ApifyDataset","Appcues","Appfigures","Appfollow","AppleSearchAds","Applications","Apptivo","Asana","AsanaCredentials","Ashby","Astra","Auth0","AuthMethod","AuthType","AuthenticateViaAPIKey","AuthenticateViaAccessKeys","AuthenticateViaAsanaOauth","AuthenticateViaFacebookMarketingOauth","AuthenticateViaGoogleOAuth","AuthenticateViaHarvestOAuth","AuthenticateViaLeverAPIKey","AuthenticateViaLeverOAuth","AuthenticateViaMicrosoft","AuthenticateViaMicrosoftOAuth","AuthenticateViaMicrosoftOAuth20","AuthenticateViaOAuth","AuthenticateViaOAuth20","AuthenticateViaOauth2","AuthenticateViaPassword","AuthenticateViaPrivateKey","AuthenticateViaRetentlyOAuth","AuthenticateViaStorageAccountKey","AuthenticateWithAPIToken","AuthenticateWithPersonalAccessToken","Authentication","AuthenticationMechanism","AuthenticationMethod","AuthenticationMode","AuthenticationType","AuthenticationViaGoogleOAuth","AuthenticationWildcard","Authorization","AuthorizationMethod","AuthorizationType","Autogenerated","AvroApacheAvro","AvroFormat","AwsCloudtrail","AwsDatalake","AzBlobAzureBlobStorage","AzureBlobStorage","AzureBlobStorageCredentials","AzureOpenAI","AzureTable","BambooHr","BaseURL","BaseURLPrefix","Basecamp","Basic","BatchedStandardInserts","Beamer","BetweenFilter","Bigmailer","Bigquery","BingAds","Bitly","Blogger","BothUsern
ameAndPasswordIsRequiredForAuthenticationRequest","Box","Braintree","Braze","BreezyHr","Brevo","Brex","Bugsnag","Buildkite","BunnyInc","Buzzsprout","ByMarkdownHeader","ByProgrammingLanguage","BySeparator","Bzip2","CSVCommaSeparatedValues","CSVFormat","CSVHeaderDefinition","CacheType","CalCom","Calendly","Callrail","CampaignMonitor","Campayn","Canny","CapsuleCrm","CaptureModeAdvanced","CareQualityCommission","Cart","CastorEdc","CatalogType","Categories","Category","CentralAPIRouter","Chameleon","Chargebee","Chargedesk","Chargify","Chartmogul","ChooseHowToPartitionData","Cimis","Cin7","Circa","Circleci","ClarifAi","Clazar","ClickWindowDays","Clickhouse","ClickupAPI","Clockify","Clockodo","CloseCom","Cloudbeds","ClusterType","Coassemble","Coda","Codec","Codefresh","Cohere","CohortReportSettings","CohortReports","Cohorts","CohortsRange","CoinAPI","Coinmarketcap","Collection","Compression","CompressionCodec","CompressionCodecOptional","CompressionType","Concord","Configcat","ConfiguredStreamMapper","Confluence","ConnectBy","ConnectionCreateRequest","ConnectionPatchRequest","ConnectionResponse","ConnectionScheduleResponse","ConnectionStatusEnum","ConnectionSyncModeEnum","ConnectionType","ConnectionsResponse","ContentType","ConversionReportTime","Convertkit","Convex","Copper","CopyRawFiles","Countercyclical","Country","Credential","CredentialType","Credentials","CredentialsTitle","CustomQueriesArray","CustomReportConfig","CustomerIo","CustomerStatus","DataCenter","DataCenterID","DataCenterLocation","DataFreshness","DataRegion","DataSource","DataType","Databricks","Datadog","Datascope","DatasetLocation","DateRange","Dbt","DefaultVectorizer","Deflate","DeletionMode","Delighted","DeliveryMethod","DeliveryType","Deputy","DestinationAstra","DestinationAstraLanguage","DestinationAstraMode","DestinationAstraSchemasEmbeddingEmbedding1Mode","DestinationAstraSchemasEmbeddingEmbeddingMode","DestinationAstraSchemasEmbeddingMode","DestinationAstraSchemasMode","DestinationAstraSchemasProcessingMode","DestinationAstraSchemasProcessingTextSplitterMode","DestinationAstraSchemasProcessingTextSplitterTextSplitterMode","DestinationAwsDatalake","DestinationAwsDatalakeCompressionCodecOptional","DestinationAwsDatalakeCredentialsTitle","DestinationAwsDatalakeFormatTypeWildcard","DestinationAzureBlobStorage","DestinationAzureBlobStorageAzureBlobStorage","DestinationAzureBlobStorageFormatType","DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON","DestinationBigquery","DestinationBigqueryCredentialType","DestinationBigqueryHMACKey","DestinationBigqueryMethod","DestinationClickhouse","DestinationClickhouseSchemasTunnelMethod","DestinationClickhouseTunnelMethod","DestinationConfiguration","DestinationConvex","DestinationCreateRequest","DestinationDatabricks","DestinationDatabricksAuthType","DestinationDatabricksSchemasAuthType","DestinationDevNull","DestinationDevNullLoggingType","DestinationDevNullSchemasLoggingType","DestinationDevNullSchemasTestDestinationTestDestinationType","DestinationDevNullSchemasTestDestinationType","DestinationDevNullTestDestinationType","DestinationDuckdb","DestinationDynamodb","DestinationElasticsearch","DestinationElasticsearchMethod","DestinationElasticsearchNoTunnel","DestinationElasticsearchPasswordAuthentication","DestinationElasticsearchSSHKeyAuthentication","DestinationElasticsearchSSHTunnelMethod","DestinationElasticsearchSchemasAuthenticationMethodMethod","DestinationElasticsearchSchemasMethod","DestinationElasticsearchSchemasTunnelMethod","DestinationElasticsearchSchemasTunnelMetho
dTunnelMethod","DestinationElasticsearchTunnelMethod","DestinationFirebolt","DestinationFireboltLoadingMethod","DestinationFireboltMethod","DestinationFireboltSchemasMethod","DestinationFirestore","DestinationGcs","DestinationGcsAuthentication","DestinationGcsCSVCommaSeparatedValues","DestinationGcsCodec","DestinationGcsCompression","DestinationGcsCompressionCodec","DestinationGcsCompressionType","DestinationGcsFormatType","DestinationGcsGZIP","DestinationGcsGcs","DestinationGcsJSONLinesNewlineDelimitedJSON","DestinationGcsNoCompression","DestinationGcsOutputFormat","DestinationGcsParquetColumnarStorage","DestinationGcsSchemasCodec","DestinationGcsSchemasCompressionType","DestinationGcsSchemasFormatCodec","DestinationGcsSchemasFormatCompressionType","DestinationGcsSchemasFormatFormatType","DestinationGcsSchemasFormatOutputFormat1Codec","DestinationGcsSchemasFormatOutputFormatCodec","DestinationGcsSchemasFormatOutputFormatFormatType","DestinationGcsSchemasFormatType","DestinationGcsSchemasNoCompression","DestinationGoogleSheets","DestinationGoogleSheetsGoogleSheets","DestinationIceberg","DestinationIcebergCatalogType","DestinationIcebergS3","DestinationIcebergS3BucketRegion","DestinationIcebergSchemasCatalogConfigCatalogType","DestinationIcebergSchemasCatalogConfigIcebergCatalogConfigCatalogType","DestinationIcebergSchemasCatalogType","DestinationIcebergStorageType","DestinationMilvus","DestinationMilvusAPIToken","DestinationMilvusAuthentication","DestinationMilvusAzureOpenAI","DestinationMilvusByMarkdownHeader","DestinationMilvusByProgrammingLanguage","DestinationMilvusBySeparator","DestinationMilvusCohere","DestinationMilvusEmbedding","DestinationMilvusFake","DestinationMilvusFieldNameMappingConfigModel","DestinationMilvusIndexing","DestinationMilvusLanguage","DestinationMilvusMode","DestinationMilvusOpenAI","DestinationMilvusOpenAICompatible","DestinationMilvusProcessingConfigModel","DestinationMilvusSchemasEmbeddingEmbedding5Mode","DestinationMilvusSchemasEmbeddingEmbeddingMode","DestinationMilvusSchemasEmbeddingMode","DestinationMilvusSchemasIndexingAuthAuthenticationMode","DestinationMilvusSchemasIndexingAuthMode","DestinationMilvusSchemasIndexingMode","DestinationMilvusSchemasMode","DestinationMilvusSchemasProcessingMode","DestinationMilvusSchemasProcessingTextSplitterMode","DestinationMilvusSchemasProcessingTextSplitterTextSplitterMode","DestinationMilvusTextSplitter","DestinationMilvusUsernamePassword","DestinationMongodb","DestinationMongodbAuthorization","DestinationMongodbInstance","DestinationMongodbNoTunnel","DestinationMongodbNone","DestinationMongodbPasswordAuthentication","DestinationMongodbSSHKeyAuthentication","DestinationMongodbSSHTunnelMethod","DestinationMongodbSchemasAuthorization","DestinationMongodbSchemasInstance","DestinationMongodbSchemasTunnelMethod","DestinationMongodbSchemasTunnelMethodTunnelMethod","DestinationMongodbTunnelMethod","DestinationMotherduck","DestinationMssql","DestinationMssqlNoTunnel","DestinationMssqlPasswordAuthentication","DestinationMssqlSSHKeyAuthentication","DestinationMssqlSSHTunnelMethod","DestinationMssqlSchemasSslMethod","DestinationMssqlSchemasSslMethodSslMethod","DestinationMssqlSchemasTunnelMethod","DestinationMssqlSchemasTunnelMethodTunnelMethod","DestinationMssqlSslMethod","DestinationMssqlTunnelMethod","DestinationMysql","DestinationMysqlNoTunnel","DestinationMysqlPasswordAuthentication","DestinationMysqlSSHKeyAuthentication","DestinationMysqlSSHTunnelMethod","DestinationMysqlSchemasTunnelMethod","DestinationMysqlSchemasTunnelMet
hodTunnelMethod","DestinationMysqlTunnelMethod","DestinationOracle","DestinationOracleEncryptionMethod","DestinationOracleNoTunnel","DestinationOraclePasswordAuthentication","DestinationOracleSSHKeyAuthentication","DestinationOracleSSHTunnelMethod","DestinationOracleSchemasEncryptionMethod","DestinationOracleSchemasTunnelMethod","DestinationOracleSchemasTunnelMethodTunnelMethod","DestinationOracleTunnelMethod","DestinationOracleUnencrypted","DestinationPatchRequest","DestinationPgvector","DestinationPgvectorAzureOpenAI","DestinationPgvectorByMarkdownHeader","DestinationPgvectorByProgrammingLanguage","DestinationPgvectorBySeparator","DestinationPgvectorCohere","DestinationPgvectorCredentials","DestinationPgvectorEmbedding","DestinationPgvectorFake","DestinationPgvectorFieldNameMappingConfigModel","DestinationPgvectorLanguage","DestinationPgvectorMode","DestinationPgvectorOpenAI","DestinationPgvectorOpenAICompatible","DestinationPgvectorProcessingConfigModel","DestinationPgvectorSchemasEmbeddingEmbedding5Mode","DestinationPgvectorSchemasEmbeddingEmbeddingMode","DestinationPgvectorSchemasEmbeddingMode","DestinationPgvectorSchemasMode","DestinationPgvectorSchemasProcessingMode","DestinationPgvectorSchemasProcessingTextSplitterMode","DestinationPgvectorSchemasProcessingTextSplitterTextSplitterMode","DestinationPgvectorTextSplitter","DestinationPinecone","DestinationPineconeAzureOpenAI","DestinationPineconeByMarkdownHeader","DestinationPineconeByProgrammingLanguage","DestinationPineconeBySeparator","DestinationPineconeCohere","DestinationPineconeEmbedding","DestinationPineconeFake","DestinationPineconeFieldNameMappingConfigModel","DestinationPineconeIndexing","DestinationPineconeLanguage","DestinationPineconeMode","DestinationPineconeOpenAI","DestinationPineconeOpenAICompatible","DestinationPineconeProcessingConfigModel","DestinationPineconeSchemasEmbeddingEmbedding5Mode","DestinationPineconeSchemasEmbeddingEmbeddingMode","DestinationPineconeSchemasEmbeddingMode","DestinationPineconeSchemasMode","DestinationPineconeSchemasProcessingMode","DestinationPineconeSchemasProcessingTextSplitterMode","DestinationPineconeSchemasProcessingTextSplitterTextSplitterMode","DestinationPineconeTextSplitter","DestinationPostgres","DestinationPostgresMode","DestinationPostgresNoTunnel","DestinationPostgresPasswordAuthentication","DestinationPostgresSSHKeyAuthentication","DestinationPostgresSSHTunnelMethod","DestinationPostgresSchemasMode","DestinationPostgresSchemasSSLModeSSLModes1Mode","DestinationPostgresSchemasSSLModeSSLModes6Mode","DestinationPostgresSchemasSSLModeSSLModesMode","DestinationPostgresSchemasSslModeMode","DestinationPostgresSchemasTunnelMethod","DestinationPostgresSchemasTunnelMethodTunnelMethod","DestinationPostgresTunnelMethod","DestinationPubsub","DestinationPutRequest","DestinationQdrant","DestinationQdrantAuthenticationMethod","DestinationQdrantAzureOpenAI","DestinationQdrantByMarkdownHeader","DestinationQdrantByProgrammingLanguage","DestinationQdrantBySeparator","DestinationQdrantCohere","DestinationQdrantEmbedding","DestinationQdrantFake","DestinationQdrantFieldNameMappingConfigModel","DestinationQdrantIndexing","DestinationQdrantLanguage","DestinationQdrantMode","DestinationQdrantNoAuth","DestinationQdrantOpenAI","DestinationQdrantOpenAICompatible","DestinationQdrantProcessingConfigModel","DestinationQdrantSchemasEmbeddingEmbedding5Mode","DestinationQdrantSchemasEmbeddingEmbeddingMode","DestinationQdrantSchemasEmbeddingMode","DestinationQdrantSchemasIndexingAuthMethodMode","DestinationQdran
tSchemasIndexingMode","DestinationQdrantSchemasMode","DestinationQdrantSchemasProcessingMode","DestinationQdrantSchemasProcessingTextSplitterMode","DestinationQdrantSchemasProcessingTextSplitterTextSplitterMode","DestinationQdrantTextSplitter","DestinationRedis","DestinationRedisDisable","DestinationRedisMode","DestinationRedisNoTunnel","DestinationRedisPasswordAuthentication","DestinationRedisSSHKeyAuthentication","DestinationRedisSSHTunnelMethod","DestinationRedisSSLModes","DestinationRedisSchemasMode","DestinationRedisSchemasTunnelMethod","DestinationRedisSchemasTunnelMethodTunnelMethod","DestinationRedisTunnelMethod","DestinationRedisVerifyFull","DestinationRedshift","DestinationRedshiftMethod","DestinationRedshiftNoTunnel","DestinationRedshiftPasswordAuthentication","DestinationRedshiftS3BucketRegion","DestinationRedshiftSSHKeyAuthentication","DestinationRedshiftSSHTunnelMethod","DestinationRedshiftSchemasTunnelMethod","DestinationRedshiftSchemasTunnelMethodTunnelMethod","DestinationRedshiftTunnelMethod","DestinationResponse","DestinationS3","DestinationS3AvroApacheAvro","DestinationS3Bzip2","DestinationS3CSVCommaSeparatedValues","DestinationS3Codec","DestinationS3Compression","DestinationS3CompressionCodec","DestinationS3CompressionType","DestinationS3Deflate","DestinationS3Flattening","DestinationS3FormatType","DestinationS3GZIP","DestinationS3Glue","DestinationS3GlueCompression","DestinationS3GlueCompressionType","DestinationS3GlueFormatType","DestinationS3GlueGZIP","DestinationS3GlueJSONLinesNewlineDelimitedJSON","DestinationS3GlueNoCompression","DestinationS3GlueOutputFormat","DestinationS3GlueS3BucketRegion","DestinationS3GlueSchemasCompressionType","DestinationS3JSONLinesNewlineDelimitedJSON","DestinationS3NoCompression","DestinationS3OutputFormat","DestinationS3ParquetColumnarStorage","DestinationS3S3BucketRegion","DestinationS3SchemasCodec","DestinationS3SchemasCompression","DestinationS3SchemasCompressionCodec","DestinationS3SchemasCompressionType","DestinationS3SchemasFlattening","DestinationS3SchemasFormatCodec","DestinationS3SchemasFormatCompressionType","DestinationS3SchemasFormatFormatType","DestinationS3SchemasFormatNoCompression","DestinationS3SchemasFormatOutputFormat3Codec","DestinationS3SchemasFormatOutputFormat3CompressionCodecCodec","DestinationS3SchemasFormatOutputFormatCodec","DestinationS3SchemasFormatOutputFormatCompressionType","DestinationS3SchemasFormatOutputFormatFormatType","DestinationS3SchemasFormatType","DestinationS3SchemasGZIP","DestinationS3SchemasNoCompression","DestinationS3Snappy","DestinationS3Xz","DestinationS3Zstandard","DestinationSftpJSON","DestinationSnowflake","DestinationSnowflakeAuthType","DestinationSnowflakeCortex","DestinationSnowflakeCortexAzureOpenAI","DestinationSnowflakeCortexByMarkdownHeader","DestinationSnowflakeCortexByProgrammingLanguage","DestinationSnowflakeCortexBySeparator","DestinationSnowflakeCortexCohere","DestinationSnowflakeCortexCredentials","DestinationSnowflakeCortexEmbedding","DestinationSnowflakeCortexFake","DestinationSnowflakeCortexFieldNameMappingConfigModel","DestinationSnowflakeCortexLanguage","DestinationSnowflakeCortexMode","DestinationSnowflakeCortexOpenAI","DestinationSnowflakeCortexOpenAICompatible","DestinationSnowflakeCortexProcessingConfigModel","DestinationSnowflakeCortexSchemasEmbeddingEmbedding5Mode","DestinationSnowflakeCortexSchemasEmbeddingEmbeddingMode","DestinationSnowflakeCortexSchemasEmbeddingMode","DestinationSnowflakeCortexSchemasMode","DestinationSnowflakeCortexSchemasProcessingMode","De
stinationSnowflakeCortexSchemasProcessingTextSplitterMode","DestinationSnowflakeCortexSchemasProcessingTextSplitterTextSplitterMode","DestinationSnowflakeCortexTextSplitter","DestinationSnowflakeOAuth20","DestinationSnowflakeSchemasAuthType","DestinationSnowflakeSchemasCredentialsAuthType","DestinationSnowflakeSnowflake","DestinationTeradata","DestinationTeradataAllow","DestinationTeradataDisable","DestinationTeradataMode","DestinationTeradataPrefer","DestinationTeradataRequire","DestinationTeradataSSLModes","DestinationTeradataSchemasMode","DestinationTeradataSchemasSSLModeSSLModes5Mode","DestinationTeradataSchemasSSLModeSSLModes6Mode","DestinationTeradataSchemasSSLModeSSLModesMode","DestinationTeradataSchemasSslModeMode","DestinationTeradataVerifyCa","DestinationTeradataVerifyFull","DestinationTimeplus","DestinationTypesense","DestinationVectara","DestinationWeaviate","DestinationWeaviateAPIToken","DestinationWeaviateAuthentication","DestinationWeaviateAzureOpenAI","DestinationWeaviateByMarkdownHeader","DestinationWeaviateByProgrammingLanguage","DestinationWeaviateBySeparator","DestinationWeaviateCohere","DestinationWeaviateEmbedding","DestinationWeaviateFake","DestinationWeaviateFieldNameMappingConfigModel","DestinationWeaviateIndexing","DestinationWeaviateLanguage","DestinationWeaviateMode","DestinationWeaviateOpenAI","DestinationWeaviateOpenAICompatible","DestinationWeaviateProcessingConfigModel","DestinationWeaviateSchemasEmbeddingEmbedding5Mode","DestinationWeaviateSchemasEmbeddingEmbedding6Mode","DestinationWeaviateSchemasEmbeddingEmbedding7Mode","DestinationWeaviateSchemasEmbeddingEmbeddingMode","DestinationWeaviateSchemasEmbeddingMode","DestinationWeaviateSchemasIndexingAuthAuthenticationMode","DestinationWeaviateSchemasIndexingAuthMode","DestinationWeaviateSchemasIndexingMode","DestinationWeaviateSchemasMode","DestinationWeaviateSchemasProcessingMode","DestinationWeaviateSchemasProcessingTextSplitterMode","DestinationWeaviateSchemasProcessingTextSplitterTextSplitterMode","DestinationWeaviateTextSplitter","DestinationWeaviateUsernamePassword","DestinationYellowbrick","DestinationYellowbrickAllow","DestinationYellowbrickDisable","DestinationYellowbrickMode","DestinationYellowbrickNoTunnel","DestinationYellowbrickPasswordAuthentication","DestinationYellowbrickPrefer","DestinationYellowbrickRequire","DestinationYellowbrickSSHKeyAuthentication","DestinationYellowbrickSSHTunnelMethod","DestinationYellowbrickSSLModes","DestinationYellowbrickSchemasMode","DestinationYellowbrickSchemasSSLModeSSLModes5Mode","DestinationYellowbrickSchemasSSLModeSSLModes6Mode","DestinationYellowbrickSchemasSSLModeSSLModesMode","DestinationYellowbrickSchemasSslModeMode","DestinationYellowbrickSchemasTunnelMethod","DestinationYellowbrickSchemasTunnelMethodTunnelMethod","DestinationYellowbrickTunnelMethod","DestinationYellowbrickVerifyCa","DestinationYellowbrickVerifyFull","DestinationsResponse","DetailType","DetectChangesWithXminSystemColumn","DevNull","Dimension","DimensionsFilter","Disable","Disabled","DistanceMetric","Dixa","Dockerhub","DocumentFileTypeFormatExperimental","Domain","DomainRegionCode","DoubleValue","Dremio","Drift","DriftCredentials","Drip","DropboxSign","Duckdb","DynamoDBRegion","Dynamodb","EConomic","EUBasedAccount","Easypost","Easypromos","Elasticemail","Elasticsearch","Emailoctopus","Embedding","EmploymentHero","Enabled","Encharge","EncryptedTrustServerCertificate","EncryptedVerifyCertificate","Encryption","EncryptionAlgorithm","EncryptionMethod","EngagementWindowDays","Enterprise","Entit
y","Environment","Eventbrite","Eventee","Eventzilla","EveryNThEntry","ExcelFormat","ExchangeRates","Expression","ExternalTableViaS3","Ezofficeinventory","FacebookMarketing","FacebookMarketingCredentials","Factorial","Failing","Fake","Faker","Fauna","FieldNameMappingConfigModel","File","FileBasedStreamConfig","FileFormat","FileStorageFormat","Filetype","Fillout","Filter","FilterAppliedWhileFetchingRecordsBasedOnAttributeKeyAndAttributeValueWhichWillBeAppendedOnTheRequestBody","FilterName","FilterType","Finage","FinancialModelling","Finnhub","Finnworlds","Firebolt","Firehydrant","Firestore","FirstNEntries","Flattening","Fleetio","Flexmail","Flexport","Float","Flowlu","Format","FormatType","FormatTypeWildcard","Formbricks","FreeAgentConnector","Freightview","Freshbooks","Freshcaller","Freshchat","Freshdesk","Freshsales","Freshservice","FromCSV","FromField","FromValue","Front","Fulcrum","GCSBucketRegion","GCSGoogleCloudStorage","GCSStaging","GCSTmpFilesAfterwardProcessing","GainsightPx","Gcs","GcsCredentials","GeographyEnum","GeographyEnumNoDefault","Getgist","Getlago","Gitbook","Github","GithubCredentials","Gitlab","GitlabCredentials","Glassfrog","GlobalAccount","GlueCatalog","Gmail","Gnews","GoCardlessAPIEnvironment","Gocardless","Goldcast","Gong","GoogleAds","GoogleAdsCredentials","GoogleAnalyticsDataAPI","GoogleAnalyticsDataAPICredentials","GoogleCalendar","GoogleClassroom","GoogleCredentials","GoogleDirectory","GoogleDrive","GoogleDriveCredentials","GoogleForms","GooglePagespeedInsights","GoogleSearchConsole","GoogleSheets","GoogleSheetsCredentials","GoogleTasks","GoogleWebfonts","Gorgias","Granularity","GranularityForGeoLocationRegion","GranularityForPeriodicReports","Greenhouse","Gridly","GroupBy","Guru","Gutendex","Gzip","HMACKey","HTTPSPublicWeb","HadoopCatalogUseHierarchicalFileSystemsAsSameAsStorageConfig","HardcodedRecords","Harvest","Header","HeaderDefinitionType","Height","Hibob","HighLevel","HiveCatalogUseApacheHiveMetaStore","Hubplanner","Hubspot","HubspotCredentials","Humanitix","IAMRole","IAMUser","Iceberg","IcebergCatalogConfig","IlluminaBasespace","In","InListFilter","IncidentIo","Indexing","Inflowinventory","InitiateOauthRequest","InsightConfig","Insightly","Instagram","Instance","Instatus","Int64Value","Intercom","Interval","InvalidCDCPositionBehaviorAdvanced","Invoiced","Invoiceninja","Ip2whois","Iterable","JSONLinesNewlineDelimitedJSON","JdbcCatalogUseRelationalDatabase","Jira","JobCreateRequest","JobResponse","JobStatusEnum","JobTypeEnum","Jobnimbus","JobsResponse","Jotform","JsonlFormat","JustSift","Justcall","K6Cloud","Katana","KeyPairAuthentication","Kind","Kisi","Kissmetrics","Klarna","KlausAPI","Klaviyo","Kyve","LSNCommitBehaviour","Lang","Language","Launchdarkly","Leadfeeder","Lemlist","LessAnnoyingCrm","Level","LeverHiring","LeverHiringCredentials","LightspeedRetail","LinkedinAds","LinkedinAdsCredentials","LinkedinPages","Linnworks","LoadingMethod","Lob","Local","LocalFilesystemLimited","Logging","LoggingConfiguration","LoggingType","LoginPassword","Lokalise","Looker","Luma","Mailchimp","MailchimpCredentials","Mailerlite","Mailgun","MailjetMail","MailjetSms","Mailosaur","Mailtrap","MapperConfiguration","MarketNewsCategory","Marketo","Marketstack","Mention","Metabase","Method","MetricsFilter","MicrosoftDataverse","MicrosoftEntraID","MicrosoftLists","MicrosoftOnedrive","MicrosoftOnedriveCredentials","MicrosoftSharepoint","MicrosoftSharepointCredentials","MicrosoftTeams","MicrosoftTeamsCredentials","Milvus","Miro","Missive","Mixmax","Mixpanel","Mode","Monday","Monda
yCredentials","MongoDBAtlas","MongoDBAtlasReplicaSet","MongoDbInstanceType","Mongodb","MongodbV2","Motherduck","Mssql","Mux","MyHours","Mysql","N8n","NamespaceDefinitionEnum","NamespaceDefinitionEnumNoDefault","Nasa","NativeNetworkEncryptionNNE","Netsuite","NewsAPI","NewsdataIo","NoAuth","NoAuthentication","NoCompression","NoExternalEmbedding","NoTunnel","Nocrm","NonBreakingSchemaUpdatesBehaviorEnum","NonBreakingSchemaUpdatesBehaviorEnumNoDefault","NoneT","Normalization","NormalizationFlattening","NorthpassLms","NotExpression","Notion","NotionCredentials","Nullable","NumericFilter","Nutshell","Nylas","Nytimes","OAuth","OAuth20","OAuth20Credentials","OAuth20WithPrivateKey","OAuth2AccessToken","OAuth2ConfidentialApplication","OAuth2Recommended","OAuthActorNames","OAuthCredentialsConfiguration","OauthAuthentication","Okta","Omnisend","Oncehub","Onepagecrm","Onesignal","Onfleet","OpenAI","OpenAICompatible","OpenDataDc","Openaq","Openfda","Openweather","Operator","OpinionStage","Opsgenie","OptionTitle","OptionsList","OrGroup","Oracle","Orb","Orbit","OrganizationResponse","OrganizationsResponse","OriginDatacenterOfTheSurveyMonkeyAccount","Oura","OutbrainAmplify","OutputFormat","OutputFormatWildcard","Outreach","Oveit","PabblySubscriptionsBilling","Pandadoc","Paperform","Papersign","Pardot","ParquetColumnarStorage","ParquetFormat","ParsingStrategy","PasswordAuthentication","PaypalTransaction","Paystack","Pendo","Pennylane","PeriodUsedForMostPopularStreams","PermissionCreateRequest","PermissionResponse","PermissionResponseRead","PermissionScope","PermissionType","PermissionUpdateRequest","PermissionsResponse","Persistiq","Persona","PersonalAccessToken","PexelsAPI","Pgvector","Picqer","Pinecone","Pinterest","PinterestCredentials","Pipedrive","Pipeliner","PivotCategory","PivotalTracker","Piwik","Plaid","PlaidEnvironment","Planhat","Plausible","Plugin","Pocket","Pokeapi","PokemonName","PolygonStockAPI","Postgres","PostgresConnection","Posthog","Postmarkapp","Prefer","Preferred","Prestashop","Pretix","Primetric","PrivateApp","PrivateToken","Processing","ProcessingConfigModel","ProductCatalog","Productboard","Productive","ProjectSecret","PublicPermissionType","Pubsub","Pypi","Qdrant","Qualaroo","Queries","Quickbooks","RESTCatalog","Railz","RandomSampling","Range","RdStationMarketing","RdStationMarketingAuthorization","ReadChangesUsingBinaryLogCDC","ReadChangesUsingChangeDataCaptureCDC","ReadChangesUsingWriteAheadLogCDC","Recharge","Recreation","Recruitee","Recurly","Reddit","Redis","Redshift","Referralhero","Region","Rentcast","Repairshopr","ReplicaSet","ReplicateRecords","ReplyIo","ReportConfig","ReportName","ReportOptions","ReportRecordTypeEnum","ReportingDataObject","Require","Required","Retently","Revenuecat","RevolutMerchant","RkiCovid","Rocketlane","RoleBasedAuthentication","Rollbar","Rootly","Rss","Ruddr","S3","S3AmazonWebServices","S3BucketRegion","S3Glue","SCPSecureCopyProtocol","SFTPSecureFileTransferProtocol","SQLInserts","SSHKeyAuthentication","SSHSecureShell","SSHTunnelMethod","SSLMethod","SSLModes","Safetyculture","SageHr","Salesflare","Salesforce","Salesloft","SandboxAccessToken","SapFieldglass","Savvycal","ScanChangesWithUserDefinedCursor","ScheduleTypeEnum","ScheduleTypeWithBasicEnum","SchemeBasicAuth","SchemeClientCredentials","ScopeType","Scryfall","SearchCriteria","SearchIn","SearchScope","Secoda","Security","Segment","SelectedFieldInfo","SelfManagedReplicaSet","Sendgrid","Sendinblue","Sendowl","Sendpulse","Senseforce","Sentry","SerializationLibrary","ServerManaged","ServiceAccount",
"ServiceAccountAuthentication","ServiceAccountKey","ServiceAccountKeyAuthentication","ServiceKeyAuthentication","ServiceName","Sevenshifts","Sftp","SftpBulk","SftpJSON","ShareTypeUsedForMostPopularSharedStream","Sharetribe","Shippo","Shopify","ShopifyAuthorizationMethod","ShopifyCredentials","Shortcut","Shortio","SigmaComputing","SignInViaGoogleOAuth","SignInViaRDStationOAuth","SignInViaSlackOAuth","Silent","Simfin","Simplecast","Simplesat","SingleStoreAccessToken","Site","Slack","SlackCredentials","Smaily","Smartengage","Smartreach","Smartsheets","SmartsheetsCredentials","Smartwaiver","SnapchatMarketing","Snappy","Snowflake","SnowflakeConnection","SnowflakeCortex","SnowflakeCredentials","SolarwindsServiceDesk","SonarCloud","SortBy","Source7shifts","SourceActivecampaign","SourceAgilecrm","SourceAha","SourceAirbyte","SourceAircall","SourceAirtable","SourceAirtableAirtable","SourceAirtableAuthMethod","SourceAirtableAuthentication","SourceAirtableOAuth20","SourceAirtablePersonalAccessToken","SourceAirtableSchemasAuthMethod","SourceAkeneo","SourceAlgolia","SourceAmazonAds","SourceAmazonAdsAmazonAds","SourceAmazonAdsAuthType","SourceAmazonSellerPartner","SourceAmazonSellerPartnerAmazonSellerPartner","SourceAmazonSellerPartnerAuthType","SourceAmazonSqs","SourceAmazonSqsAWSRegion","SourceAmplitude","SourceApifyDataset","SourceAppcues","SourceAppfigures","SourceAppfollow","SourceAppleSearchAds","SourceApptivo","SourceAsana","SourceAsanaAsana","SourceAsanaCredentialsTitle","SourceAsanaSchemasCredentialsTitle","SourceAshby","SourceAuth0","SourceAuth0AuthenticationMethod","SourceAuth0SchemasAuthenticationMethod","SourceAuth0SchemasCredentialsAuthenticationMethod","SourceAwsCloudtrail","SourceAzureBlobStorage","SourceAzureBlobStorageAuthType","SourceAzureBlobStorageAuthentication","SourceAzureBlobStorageAzureBlobStorage","SourceAzureBlobStorageFiletype","SourceAzureBlobStorageHeaderDefinitionType","SourceAzureBlobStorageMode","SourceAzureBlobStorageSchemasAuthType","SourceAzureBlobStorageSchemasFiletype","SourceAzureBlobStorageSchemasHeaderDefinitionType","SourceAzureBlobStorageSchemasStreamsFiletype","SourceAzureBlobStorageSchemasStreamsFormatFiletype","SourceAzureTable","SourceBambooHr","SourceBasecamp","SourceBeamer","SourceBigmailer","SourceBigquery","SourceBigqueryBigquery","SourceBingAds","SourceBingAdsBingAds","SourceBitly","SourceBlogger","SourceBox","SourceBraintree","SourceBraintreeEnvironment","SourceBraze","SourceBreezyHr","SourceBrevo","SourceBrex","SourceBugsnag","SourceBuildkite","SourceBunnyInc","SourceBuzzsprout","SourceCalCom","SourceCalendly","SourceCallrail","SourceCampaignMonitor","SourceCampayn","SourceCanny","SourceCapsuleCrm","SourceCareQualityCommission","SourceCart","SourceCartAuthType","SourceCartAuthorizationMethod","SourceCartSchemasAuthType","SourceCastorEdc","SourceChameleon","SourceChargebee","SourceChargedesk","SourceChargify","SourceChartmogul","SourceCimis","SourceCin7","SourceCirca","SourceCircleci","SourceClarifAi","SourceClazar","SourceClickhouse","SourceClickhouseClickhouse","SourceClickhouseNoTunnel","SourceClickhousePasswordAuthentication","SourceClickhouseSSHKeyAuthentication","SourceClickhouseSSHTunnelMethod","SourceClickhouseSchemasTunnelMethod","SourceClickhouseSchemasTunnelMethodTunnelMethod","SourceClickhouseTunnelMethod","SourceClickupAPI","SourceClockify","SourceClockodo","SourceCloseCom","SourceCloudbeds","SourceCoassemble","SourceCoda","SourceCodefresh","SourceCoinAPI","SourceCoinmarketcap","SourceConcord","SourceConcordEnvironment","SourceConfigcat","
SourceConfiguration","SourceConfluence","SourceConvertkit","SourceConvex","SourceConvexConvex","SourceCopper","SourceCountercyclical","SourceCreateRequest","SourceCustomerIo","SourceDatadog","SourceDatascope","SourceDbt","SourceDelighted","SourceDeputy","SourceDixa","SourceDockerhub","SourceDremio","SourceDrift","SourceDriftAuthorizationMethod","SourceDriftCredentials","SourceDriftDrift","SourceDriftOAuth20","SourceDriftSchemasCredentials","SourceDrip","SourceDropboxSign","SourceDynamodb","SourceDynamodbAuthType","SourceDynamodbCredentials","SourceDynamodbDynamodb","SourceDynamodbDynamodbRegion","SourceDynamodbSchemasAuthType","SourceEConomic","SourceEasypost","SourceEasypromos","SourceElasticemail","SourceEmailoctopus","SourceEmploymentHero","SourceEncharge","SourceEventbrite","SourceEventee","SourceEventzilla","SourceExchangeRates","SourceEzofficeinventory","SourceFacebookMarketing","SourceFacebookMarketingActionReportTime","SourceFacebookMarketingAuthType","SourceFacebookMarketingAuthentication","SourceFacebookMarketingFacebookMarketing","SourceFacebookMarketingSchemasAuthType","SourceFacebookMarketingValidEnums","SourceFactorial","SourceFaker","SourceFauna","SourceFaunaDeletionMode","SourceFaunaSchemasDeletionMode","SourceFile","SourceFileFileFormat","SourceFileSchemasProviderStorage","SourceFileSchemasProviderStorageProvider6Storage","SourceFileSchemasProviderStorageProvider7Storage","SourceFileSchemasProviderStorageProvider8Storage","SourceFileSchemasProviderStorageProviderStorage","SourceFileSchemasStorage","SourceFileStorage","SourceFillout","SourceFinage","SourceFinancialModelling","SourceFinnhub","SourceFinnworlds","SourceFirebolt","SourceFireboltFirebolt","SourceFirehydrant","SourceFleetio","SourceFlexmail","SourceFlexport","SourceFloat","SourceFlowlu","SourceFormbricks","SourceFreeAgentConnector","SourceFreightview","SourceFreshbooks","SourceFreshcaller","SourceFreshchat","SourceFreshdesk","SourceFreshsales","SourceFreshservice","SourceFront","SourceFulcrum","SourceGainsightPx","SourceGcs","SourceGcsAuthType","SourceGcsAuthentication","SourceGcsAutogenerated","SourceGcsAvroFormat","SourceGcsCSVFormat","SourceGcsCSVHeaderDefinition","SourceGcsFileBasedStreamConfig","SourceGcsFiletype","SourceGcsFormat","SourceGcsFromCSV","SourceGcsGcs","SourceGcsHeaderDefinitionType","SourceGcsJsonlFormat","SourceGcsLocal","SourceGcsMode","SourceGcsParquetFormat","SourceGcsParsingStrategy","SourceGcsProcessing","SourceGcsSchemasAuthType","SourceGcsSchemasFiletype","SourceGcsSchemasHeaderDefinitionType","SourceGcsSchemasMode","SourceGcsSchemasStreamsFiletype","SourceGcsSchemasStreamsFormatFiletype","SourceGcsSchemasStreamsFormatFormat6Filetype","SourceGcsSchemasStreamsFormatFormatFiletype","SourceGcsSchemasStreamsHeaderDefinitionType","SourceGcsUserProvided","SourceGcsValidationPolicy","SourceGetgist","SourceGetlago","SourceGitbook","SourceGithub","SourceGithubAuthentication","SourceGithubGithub","SourceGithubOptionTitle","SourceGithubPersonalAccessToken","SourceGitlab","SourceGitlabAuthType","SourceGitlabAuthorizationMethod","SourceGitlabGitlab","SourceGitlabOAuth20","SourceGitlabSchemasAuthType","SourceGlassfrog","SourceGmail","SourceGnews","SourceGnewsCountry","SourceGnewsLanguage","SourceGnewsSortBy","SourceGocardless","SourceGoldcast","SourceGong","SourceGoogleAds","SourceGoogleAdsGoogleAds","SourceGoogleAnalyticsDataAPI","SourceGoogleAnalyticsDataAPIAndGroup","SourceGoogleAnalyticsDataAPIAuthType","SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth","SourceGoogleAnalyticsDataAPIBetweenFi
lter","SourceGoogleAnalyticsDataAPICredentials","SourceGoogleAnalyticsDataAPICustomReportConfig","SourceGoogleAnalyticsDataAPIDisabled","SourceGoogleAnalyticsDataAPIDoubleValue","SourceGoogleAnalyticsDataAPIEnabled","SourceGoogleAnalyticsDataAPIExpression","SourceGoogleAnalyticsDataAPIFilter","SourceGoogleAnalyticsDataAPIFilterName","SourceGoogleAnalyticsDataAPIFilterType","SourceGoogleAnalyticsDataAPIFromValue","SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI","SourceGoogleAnalyticsDataAPIGranularity","SourceGoogleAnalyticsDataAPIInListFilter","SourceGoogleAnalyticsDataAPIInt64Value","SourceGoogleAnalyticsDataAPINotExpression","SourceGoogleAnalyticsDataAPINumericFilter","SourceGoogleAnalyticsDataAPIOrGroup","SourceGoogleAnalyticsDataAPISchemasAuthType","SourceGoogleAnalyticsDataAPISchemasBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName","SourceGoogleAnalytics
DataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter","SourceGoogleAnalyticsDat
aAPISchemasCustomReportsArrayDimensionFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayEnabled","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayExpression","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterExpression","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReport
sArrayMetricFilterMetricsFilter1ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArr
ayMetricFilterMetricsFilter3ExpressionValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterExpression","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMe
tricFilterValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueType","SourceGoogleAnalyticsDataAPISchemasDoubleValue","SourceGoogleAnalyticsDataAPISchemasEnabled","SourceGoogleAnalyticsDataAPISchemasExpression","SourceGoogleAnalyticsDataAPISchemasFilter","SourceGoogleAnalyticsDataAPISchemasFilterName","SourceGoogleAnalyticsDataAPISchemasFilterType","SourceGoogleAnalyticsDataAPISchemasFromValue","SourceGoogleAnalyticsDataAPISchemasInListFilter","SourceGoogleAnalyticsDataAPISchemasInt64Value","SourceGoogleAnalyticsDataAPISchemasNumericFilter","SourceGoogleAnalyticsDataAPISchemasStringFilter","SourceGoogleAnalyticsDataAPISchemasToValue","SourceGoogleAnalyticsDataAPISchemasValidEnums","SourceGoogleAnalyticsDataAPISchemasValue","SourceGoogleAnalyticsDataAPISchemasValueType","SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication","SourceGoogleAnalyticsDataAPIStringFilter","SourceGoogleAnalyticsDataAPIToValue","SourceGoogleAnalyticsDataAPIValidEnums","SourceGoogleAnalyticsDataAPIValue","SourceGoogleAnalyticsDataAPIValueType","SourceGoogleCalendar","SourceGoogleClassroom","SourceGoogleDirectory","SourceGoogleDirectoryCredentialsTitle","SourceGoogleDirectoryGoogleCredentials","SourceGoogleDirectorySchemasCredentialsTitle","SourceGoogleDrive","SourceGoogleDriveAuthType","SourceGoogleDriveAuthenticateViaGoogleOAuth","SourceGoogleDriveAuthentication","SourceGoogleDriveAutogenerated","SourceGoogleDriveAvroFormat","SourceGoogleDriveCSVFormat","SourceGoogleDriveCSVHeaderDefinition","SourceGoogleDriveDocumentFileTypeFormatExperimental","SourceGoogleDriveFileBasedStreamConfig","SourceGoogleDriveFiletype","SourceGoogleDriveFormat","SourceGoogleDriveFromCSV","SourceGoogleDriveGoogleDrive","SourceGoogleDriveHeaderDefinitionType","SourceGoogleDriveJsonlFormat","SourceGoogleDriveLocal","SourceGoogleDriveMode","SourceGoogleDriveParquetFormat","SourceGoogleDriveParsingStrategy","SourceGoogleDriveProcessing","SourceGoogleDriveSchemasAuthType","SourceGoogleDriveSchemasFiletype","SourceGoogleDriveSchemasHeaderDefinitionType","SourceGoogleDriveSchemasStreamsFiletype","SourceGoogleDriveSchemasStreamsFormatFiletype","SourceGoogleDriveSchemasStreamsFormatFormatFiletype","SourceGoogleDriveSchemasStreamsHeaderDefinitionType","SourceGoogleDriveServiceAccountKeyAuthentication","SourceGoogleDriveUserProvided","SourceGoogleDriveValidationPolicy","SourceGoogleForms","SourceGooglePagespeedInsights","SourceGoogleSearchConsole","SourceGoogleSearchConsoleAuthType","SourceGoogleSearchConsoleCustomReportConfig","SourceGoogleSearchConsoleGoogleSearchConsole","SourceGoogleSearchConsoleOAuth","SourceGoogleSearchConsoleSchemasAuthType","SourceGoogleSearchConsoleServiceAccountKeyAuthentication","SourceGoogleSearchConsoleValidEnums","SourceGoogleSheets","SourceGoogleSheetsAuthType","SourceGoogleSheetsAuthenticateViaGoogleOAuth","SourceGoogleSheetsAuthentication","SourceGoogleSheetsGoogleSheets","SourceGoogleSheetsSchemasAuthType","SourceGoogleSheetsServiceAccountKeyAuthentication","SourceGoogleTasks","SourceGoogleWebfonts","SourceGorgias","SourceGreenhouse","SourceGridly","SourceGuru","SourceGutendex","SourceHardcodedRecords
","SourceHarvest","SourceHarvestAuthType","SourceHarvestAuthenticateWithPersonalAccessToken","SourceHarvestAuthenticationMechanism","SourceHarvestSchemasAuthType","SourceHeight","SourceHibob","SourceHighLevel","SourceHubplanner","SourceHubspot","SourceHubspotAuthType","SourceHubspotAuthentication","SourceHubspotHubspot","SourceHubspotOAuth","SourceHubspotSchemasAuthType","SourceHumanitix","SourceIlluminaBasespace","SourceIncidentIo","SourceInflowinventory","SourceInsightly","SourceInstagram","SourceInstagramInstagram","SourceInstatus","SourceIntercom","SourceIntercomIntercom","SourceInvoiced","SourceInvoiceninja","SourceIp2whois","SourceIterable","SourceJira","SourceJobnimbus","SourceJotform","SourceJotformAPIEndpoint","SourceJotformSchemasAPIEndpoint","SourceJustSift","SourceJustcall","SourceK6Cloud","SourceKatana","SourceKisi","SourceKissmetrics","SourceKlarna","SourceKlarnaRegion","SourceKlausAPI","SourceKlaviyo","SourceKyve","SourceLaunchdarkly","SourceLeadfeeder","SourceLemlist","SourceLessAnnoyingCrm","SourceLeverHiring","SourceLeverHiringAuthType","SourceLeverHiringAuthenticationMechanism","SourceLeverHiringEnvironment","SourceLeverHiringLeverHiring","SourceLeverHiringSchemasAuthType","SourceLightspeedRetail","SourceLinkedinAds","SourceLinkedinAdsAccessToken","SourceLinkedinAdsAuthMethod","SourceLinkedinAdsAuthentication","SourceLinkedinAdsLinkedinAds","SourceLinkedinAdsOAuth20","SourceLinkedinAdsSchemasAuthMethod","SourceLinkedinPages","SourceLinkedinPagesAccessToken","SourceLinkedinPagesAuthMethod","SourceLinkedinPagesAuthentication","SourceLinkedinPagesOAuth20","SourceLinkedinPagesSchemasAuthMethod","SourceLinnworks","SourceLob","SourceLokalise","SourceLooker","SourceLuma","SourceMailchimp","SourceMailchimpAuthType","SourceMailchimpAuthentication","SourceMailchimpMailchimp","SourceMailchimpOAuth20","SourceMailchimpSchemasAuthType","SourceMailerlite","SourceMailgun","SourceMailjetMail","SourceMailjetSms","SourceMailosaur","SourceMailtrap","SourceMarketo","SourceMarketstack","SourceMention","SourceMetabase","SourceMicrosoftDataverse","SourceMicrosoftEntraID","SourceMicrosoftLists","SourceMicrosoftOnedrive","SourceMicrosoftOnedriveAuthType","SourceMicrosoftOnedriveAuthentication","SourceMicrosoftOnedriveAutogenerated","SourceMicrosoftOnedriveAvroFormat","SourceMicrosoftOnedriveCSVFormat","SourceMicrosoftOnedriveCSVHeaderDefinition","SourceMicrosoftOnedriveFileBasedStreamConfig","SourceMicrosoftOnedriveFiletype","SourceMicrosoftOnedriveFormat","SourceMicrosoftOnedriveFromCSV","SourceMicrosoftOnedriveHeaderDefinitionType","SourceMicrosoftOnedriveJsonlFormat","SourceMicrosoftOnedriveLocal","SourceMicrosoftOnedriveMicrosoftOnedrive","SourceMicrosoftOnedriveMode","SourceMicrosoftOnedriveParquetFormat","SourceMicrosoftOnedriveParsingStrategy","SourceMicrosoftOnedriveProcessing","SourceMicrosoftOnedriveSchemasAuthType","SourceMicrosoftOnedriveSchemasFiletype","SourceMicrosoftOnedriveSchemasHeaderDefinitionType","SourceMicrosoftOnedriveSchemasStreamsFiletype","SourceMicrosoftOnedriveSchemasStreamsFormatFiletype","SourceMicrosoftOnedriveSchemasStreamsFormatFormatFiletype","SourceMicrosoftOnedriveSchemasStreamsHeaderDefinitionType","SourceMicrosoftOnedriveUnstructuredDocumentFormat","SourceMicrosoftOnedriveUserProvided","SourceMicrosoftOnedriveValidationPolicy","SourceMicrosoftSharepoint","SourceMicrosoftSharepointAuthType","SourceMicrosoftSharepointAuthenticateViaMicrosoftOAuth","SourceMicrosoftSharepointAuthentication","SourceMicrosoftSharepointAutogenerated","SourceMicrosoftSharepointAvroFo
rmat","SourceMicrosoftSharepointCSVFormat","SourceMicrosoftSharepointCSVHeaderDefinition","SourceMicrosoftSharepointExcelFormat","SourceMicrosoftSharepointFileBasedStreamConfig","SourceMicrosoftSharepointFiletype","SourceMicrosoftSharepointFormat","SourceMicrosoftSharepointFromCSV","SourceMicrosoftSharepointHeaderDefinitionType","SourceMicrosoftSharepointJsonlFormat","SourceMicrosoftSharepointLocal","SourceMicrosoftSharepointMicrosoftSharepoint","SourceMicrosoftSharepointMode","SourceMicrosoftSharepointParquetFormat","SourceMicrosoftSharepointParsingStrategy","SourceMicrosoftSharepointProcessing","SourceMicrosoftSharepointSchemasAuthType","SourceMicrosoftSharepointSchemasFiletype","SourceMicrosoftSharepointSchemasHeaderDefinitionType","SourceMicrosoftSharepointSchemasStreamsFiletype","SourceMicrosoftSharepointSchemasStreamsFormatFiletype","SourceMicrosoftSharepointSchemasStreamsFormatFormat6Filetype","SourceMicrosoftSharepointSchemasStreamsFormatFormatFiletype","SourceMicrosoftSharepointSchemasStreamsHeaderDefinitionType","SourceMicrosoftSharepointSearchScope","SourceMicrosoftSharepointServiceKeyAuthentication","SourceMicrosoftSharepointUnstructuredDocumentFormat","SourceMicrosoftSharepointUserProvided","SourceMicrosoftSharepointValidationPolicy","SourceMicrosoftTeams","SourceMicrosoftTeamsAuthType","SourceMicrosoftTeamsAuthenticationMechanism","SourceMicrosoftTeamsMicrosoftTeams","SourceMicrosoftTeamsSchemasAuthType","SourceMiro","SourceMissive","SourceMixmax","SourceMixpanel","SourceMixpanelOptionTitle","SourceMixpanelRegion","SourceMixpanelSchemasOptionTitle","SourceMode","SourceMonday","SourceMondayAuthType","SourceMondayAuthorizationMethod","SourceMondayMonday","SourceMondayOAuth20","SourceMondaySchemasAuthType","SourceMongodbV2","SourceMongodbV2ClusterType","SourceMongodbV2SchemasClusterType","SourceMssql","SourceMssqlEncryptedTrustServerCertificate","SourceMssqlEncryptedVerifyCertificate","SourceMssqlInvalidCDCPositionBehaviorAdvanced","SourceMssqlMethod","SourceMssqlMssql","SourceMssqlNoTunnel","SourceMssqlPasswordAuthentication","SourceMssqlSSHKeyAuthentication","SourceMssqlSSHTunnelMethod","SourceMssqlSSLMethod","SourceMssqlSchemasMethod","SourceMssqlSchemasSSLMethodSSLMethodSSLMethod","SourceMssqlSchemasSslMethod","SourceMssqlSchemasSslMethodSslMethod","SourceMssqlSchemasTunnelMethod","SourceMssqlSchemasTunnelMethodTunnelMethod","SourceMssqlTunnelMethod","SourceMssqlUnencrypted","SourceMux","SourceMyHours","SourceMysql","SourceMysqlInvalidCDCPositionBehaviorAdvanced","SourceMysqlMethod","SourceMysqlMode","SourceMysqlMysql","SourceMysqlNoTunnel","SourceMysqlPasswordAuthentication","SourceMysqlSSHKeyAuthentication","SourceMysqlSSHTunnelMethod","SourceMysqlSSLModes","SourceMysqlScanChangesWithUserDefinedCursor","SourceMysqlSchemasMethod","SourceMysqlSchemasMode","SourceMysqlSchemasSSLModeSSLModesMode","SourceMysqlSchemasSslModeMode","SourceMysqlSchemasTunnelMethod","SourceMysqlSchemasTunnelMethodTunnelMethod","SourceMysqlTunnelMethod","SourceMysqlUpdateMethod","SourceMysqlVerifyCA","SourceN8n","SourceNasa","SourceNetsuite","SourceNewsAPI","SourceNewsdataIo","SourceNocrm","SourceNorthpassLms","SourceNotion","SourceNotionAccessToken","SourceNotionAuthType","SourceNotionAuthenticationMethod","SourceNotionNotion","SourceNotionOAuth20","SourceNotionSchemasAuthType","SourceNutshell","SourceNylas","SourceNytimes","SourceOkta","SourceOktaAPIToken","SourceOktaAuthType","SourceOktaAuthorizationMethod","SourceOktaOAuth20","SourceOktaSchemasAuthType","SourceOktaSchemasCredentialsAuthType","Sour
ceOmnisend","SourceOncehub","SourceOnepagecrm","SourceOnesignal","SourceOnfleet","SourceOpenDataDc","SourceOpenaq","SourceOpenfda","SourceOpenweather","SourceOpinionStage","SourceOpsgenie","SourceOracle","SourceOracleConnectionType","SourceOracleEncryption","SourceOracleEncryptionAlgorithm","SourceOracleEncryptionMethod","SourceOracleNativeNetworkEncryptionNNE","SourceOracleNoTunnel","SourceOracleOracle","SourceOraclePasswordAuthentication","SourceOracleSSHKeyAuthentication","SourceOracleSSHTunnelMethod","SourceOracleSchemasEncryptionEncryptionMethod","SourceOracleSchemasEncryptionMethod","SourceOracleSchemasTunnelMethod","SourceOracleSchemasTunnelMethodTunnelMethod","SourceOracleTLSEncryptedVerifyCertificate","SourceOracleTunnelMethod","SourceOracleUnencrypted","SourceOrb","SourceOrbit","SourceOura","SourceOutbrainAmplify","SourceOutbrainAmplifyAccessToken","SourceOutbrainAmplifyAuthenticationMethod","SourceOutbrainAmplifyUsernamePassword","SourceOutreach","SourceOveit","SourcePabblySubscriptionsBilling","SourcePandadoc","SourcePaperform","SourcePapersign","SourcePardot","SourcePatchRequest","SourcePaypalTransaction","SourcePaystack","SourcePendo","SourcePennylane","SourcePersistiq","SourcePersona","SourcePexelsAPI","SourcePicqer","SourcePinterest","SourcePinterestAuthMethod","SourcePinterestLevel","SourcePinterestPinterest","SourcePinterestSchemasValidEnums","SourcePinterestValidEnums","SourcePipedrive","SourcePipeliner","SourcePivotalTracker","SourcePiwik","SourcePlaid","SourcePlanhat","SourcePlausible","SourcePocket","SourcePocketSortBy","SourcePokeapi","SourcePolygonStockAPI","SourcePostgres","SourcePostgresAllow","SourcePostgresDisable","SourcePostgresInvalidCDCPositionBehaviorAdvanced","SourcePostgresMethod","SourcePostgresMode","SourcePostgresNoTunnel","SourcePostgresPasswordAuthentication","SourcePostgresPostgres","SourcePostgresPrefer","SourcePostgresRequire","SourcePostgresSSHKeyAuthentication","SourcePostgresSSHTunnelMethod","SourcePostgresSSLModes","SourcePostgresScanChangesWithUserDefinedCursor","SourcePostgresSchemasMethod","SourcePostgresSchemasMode","SourcePostgresSchemasReplicationMethodMethod","SourcePostgresSchemasSSLModeSSLModes5Mode","SourcePostgresSchemasSSLModeSSLModes6Mode","SourcePostgresSchemasSSLModeSSLModesMode","SourcePostgresSchemasSslModeMode","SourcePostgresSchemasTunnelMethod","SourcePostgresSchemasTunnelMethodTunnelMethod","SourcePostgresTunnelMethod","SourcePostgresUpdateMethod","SourcePostgresVerifyCa","SourcePostgresVerifyFull","SourcePosthog","SourcePostmarkapp","SourcePrestashop","SourcePretix","SourcePrimetric","SourceProductboard","SourceProductive","SourcePutRequest","SourcePypi","SourceQualaroo","SourceQuickbooks","SourceQuickbooksAuthType","SourceQuickbooksAuthorizationMethod","SourceQuickbooksOAuth20","SourceRailz","SourceRdStationMarketing","SourceRdStationMarketingAuthType","SourceRdStationMarketingAuthenticationType","SourceRdStationMarketingRdStationMarketing","SourceRecharge","SourceRecreation","SourceRecruitee","SourceRecurly","SourceReddit","SourceRedshift","SourceRedshiftRedshift","SourceReferralhero","SourceRentcast","SourceRepairshopr","SourceReplyIo","SourceResponse","SourceRetently","SourceRetentlyAuthType","SourceRetentlyAuthenticationMechanism","SourceRetentlySchemasAuthType","SourceRevenuecat","SourceRevolutMerchant","SourceRevolutMerchantEnvironment","SourceRkiCovid","SourceRocketlane","SourceRollbar","SourceRootly","SourceRss","SourceRuddr","SourceS3","SourceS3Autogenerated","SourceS3AvroFormat","SourceS3CSVFormat","SourceS3CSVH
eaderDefinition","SourceS3DeliveryType","SourceS3ExcelFormat","SourceS3FileBasedStreamConfig","SourceS3Filetype","SourceS3Format","SourceS3FromCSV","SourceS3HeaderDefinitionType","SourceS3JsonlFormat","SourceS3Local","SourceS3Mode","SourceS3ParquetFormat","SourceS3ParsingStrategy","SourceS3Processing","SourceS3S3","SourceS3SchemasFiletype","SourceS3SchemasHeaderDefinitionType","SourceS3SchemasStreamsFiletype","SourceS3SchemasStreamsFormatFiletype","SourceS3SchemasStreamsFormatFormat6Filetype","SourceS3SchemasStreamsFormatFormatFiletype","SourceS3SchemasStreamsHeaderDefinitionType","SourceS3UnstructuredDocumentFormat","SourceS3UserProvided","SourceS3ValidationPolicy","SourceSafetyculture","SourceSageHr","SourceSalesflare","SourceSalesforce","SourceSalesforceSalesforce","SourceSalesloft","SourceSalesloftAuthType","SourceSalesloftCredentials","SourceSalesloftSchemasAuthType","SourceSapFieldglass","SourceSavvycal","SourceScryfall","SourceSecoda","SourceSegment","SourceSendgrid","SourceSendinblue","SourceSendowl","SourceSendpulse","SourceSenseforce","SourceSentry","SourceSftp","SourceSftpAuthMethod","SourceSftpAuthentication","SourceSftpBulk","SourceSftpBulkAPIParameterConfigModel","SourceSftpBulkAuthType","SourceSftpBulkAuthentication","SourceSftpBulkAutogenerated","SourceSftpBulkAvroFormat","SourceSftpBulkCSVFormat","SourceSftpBulkCSVHeaderDefinition","SourceSftpBulkCopyRawFiles","SourceSftpBulkDeliveryMethod","SourceSftpBulkDeliveryType","SourceSftpBulkExcelFormat","SourceSftpBulkFileBasedStreamConfig","SourceSftpBulkFiletype","SourceSftpBulkFormat","SourceSftpBulkFromCSV","SourceSftpBulkHeaderDefinitionType","SourceSftpBulkJsonlFormat","SourceSftpBulkLocal","SourceSftpBulkMode","SourceSftpBulkParquetFormat","SourceSftpBulkParsingStrategy","SourceSftpBulkProcessing","SourceSftpBulkReplicateRecords","SourceSftpBulkSchemasAuthType","SourceSftpBulkSchemasDeliveryType","SourceSftpBulkSchemasFiletype","SourceSftpBulkSchemasHeaderDefinitionType","SourceSftpBulkSchemasMode","SourceSftpBulkSchemasStreamsFiletype","SourceSftpBulkSchemasStreamsFormatFiletype","SourceSftpBulkSchemasStreamsFormatFormat6Filetype","SourceSftpBulkSchemasStreamsFormatFormatFiletype","SourceSftpBulkSchemasStreamsHeaderDefinitionType","SourceSftpBulkUnstructuredDocumentFormat","SourceSftpBulkUserProvided","SourceSftpBulkValidationPolicy","SourceSftpBulkViaAPI","SourceSftpPasswordAuthentication","SourceSftpSSHKeyAuthentication","SourceSftpSchemasAuthMethod","SourceSharetribe","SourceShippo","SourceShopify","SourceShopifyAuthMethod","SourceShopifyOAuth20","SourceShopifySchemasAuthMethod","SourceShopifyShopify","SourceShortcut","SourceShortio","SourceSigmaComputing","SourceSimfin","SourceSimplecast","SourceSimplesat","SourceSlack","SourceSlackAPIToken","SourceSlackAuthenticationMechanism","SourceSlackOptionTitle","SourceSlackSchemasOptionTitle","SourceSlackSlack","SourceSmaily","SourceSmartengage","SourceSmartreach","SourceSmartsheets","SourceSmartsheetsAuthType","SourceSmartsheetsAuthorizationMethod","SourceSmartsheetsOAuth20","SourceSmartsheetsSchemasAuthType","SourceSmartsheetsSmartsheets","SourceSmartwaiver","SourceSnapchatMarketing","SourceSnapchatMarketingSnapchatMarketing","SourceSnowflake","SourceSnowflakeAuthType","SourceSnowflakeAuthorizationMethod","SourceSnowflakeKeyPairAuthentication","SourceSnowflakeOAuth20","SourceSnowflakeSchemasAuthType","SourceSnowflakeSchemasCredentialsAuthType","SourceSnowflakeSnowflake","SourceSnowflakeUsernameAndPassword","SourceSolarwindsServiceDesk","SourceSonarCloud","SourceSpacexAPI","So
urceSparkpost","SourceSplitIo","SourceSpotlercrm","SourceSquare","SourceSquareAPIKey","SourceSquareAuthType","SourceSquareAuthentication","SourceSquareSchemasAuthType","SourceSquarespace","SourceStatsig","SourceStatuspage","SourceStockdata","SourceStrava","SourceStravaAuthType","SourceStripe","SourceSurveySparrow","SourceSurveySparrowURLBase","SourceSurveymonkey","SourceSurveymonkeyAuthMethod","SourceSurveymonkeySurveymonkey","SourceSurvicate","SourceSysteme","SourceTaboola","SourceTeamtailor","SourceTeamwork","SourceTempo","SourceTestrail","SourceTheGuardianAPI","SourceThinkific","SourceTicketmaster","SourceTickettailor","SourceTiktokMarketing","SourceTiktokMarketingAuthType","SourceTiktokMarketingAuthenticationMethod","SourceTiktokMarketingOAuth20","SourceTiktokMarketingSchemasAuthType","SourceTiktokMarketingTiktokMarketing","SourceTimely","SourceTinyemail","SourceTodoist","SourceTrackPms","SourceTrello","SourceTremendous","SourceTremendousEnvironment","SourceTrustpilot","SourceTrustpilotAPIKey","SourceTrustpilotAuthType","SourceTrustpilotAuthorizationMethod","SourceTrustpilotOAuth20","SourceTrustpilotSchemasAuthType","SourceTvmazeSchedule","SourceTwelveData","SourceTwilio","SourceTwilioTaskrouter","SourceTwitter","SourceTypeform","SourceTypeformAuthType","SourceTypeformAuthorizationMethod","SourceTypeformOAuth20","SourceTypeformPrivateToken","SourceTypeformSchemasAuthType","SourceTypeformTypeform","SourceUbidots","SourceUnleash","SourceUppromote","SourceUsCensus","SourceUservoice","SourceVantage","SourceVeeqo","SourceVercel","SourceVismaEconomic","SourceVitally","SourceVitallyStatus","SourceVwo","SourceWaiteraid","SourceWasabiStatsAPI","SourceWeatherstack","SourceWebScrapper","SourceWebflow","SourceWhenIWork","SourceWhiskyHunter","SourceWikipediaPageviews","SourceWoocommerce","SourceWordpress","SourceWorkable","SourceWorkflowmax","SourceWorkramp","SourceWrike","SourceWufoo","SourceXkcd","SourceXsolla","SourceYahooFinancePrice","SourceYahooFinancePriceInterval","SourceYandexMetrica","SourceYotpo","SourceYouNeedABudgetYnab","SourceYoutubeAnalytics","SourceYoutubeAnalyticsYoutubeAnalytics","SourceYoutubeData","SourceZapierSupportedStorage","SourceZendeskChat","SourceZendeskChatAccessToken","SourceZendeskChatAuthorizationMethod","SourceZendeskChatCredentials","SourceZendeskChatOAuth20","SourceZendeskChatSchemasCredentials","SourceZendeskChatZendeskChat","SourceZendeskSunshine","SourceZendeskSunshineAPIToken","SourceZendeskSunshineAuthMethod","SourceZendeskSunshineAuthorizationMethod","SourceZendeskSunshineOAuth20","SourceZendeskSunshineSchemasAuthMethod","SourceZendeskSupport","SourceZendeskSupportAPIToken","SourceZendeskSupportAuthentication","SourceZendeskSupportCredentials","SourceZendeskSupportOAuth20","SourceZendeskSupportSchemasCredentials","SourceZendeskSupportZendeskSupport","SourceZendeskTalk","SourceZendeskTalkAPIToken","SourceZendeskTalkAuthType","SourceZendeskTalkAuthentication","SourceZendeskTalkOAuth20","SourceZendeskTalkSchemasAuthType","SourceZendeskTalkZendeskTalk","SourceZenefits","SourceZenloop","SourceZohoAnalyticsMetadataAPI","SourceZohoAnalyticsMetadataAPIDataCenter","SourceZohoBigin","SourceZohoBiginDataCenter","SourceZohoBilling","SourceZohoBillingRegion","SourceZohoBooks","SourceZohoBooksRegion","SourceZohoCampaign","SourceZohoCampaignDataCenter","SourceZohoCrm","SourceZohoCrmEnvironment","SourceZohoDesk","SourceZohoExpense","SourceZohoExpenseDataCenter","SourceZohoInventory","SourceZohoInvoice","SourceZohoInvoiceRegion","SourceZonkaFeedback","SourceZoom","SourcesRes
ponse","SpacexAPI","Sparkpost","SplitIo","Spotlercrm","Square","Squarespace","StandaloneMongoDbInstance","State","StateFilterEnum","StatisticsInterval","Statsig","Status","Statuspage","Stockdata","Storage","StorageConfig","StorageProvider","StorageType","Strategies","Strava","StreamConfiguration","StreamConfigurations","StreamMapperType","StreamProperties","StreamsCriteria","StringFilter","Stripe","SurveyMonkeyAuthorizationMethod","SurveySparrow","Surveymonkey","SurveymonkeyCredentials","Survicate","SwipeUpAttributionWindow","SystemIDSID","Systeme","TLSEncryptedVerifyCertificate","Taboola","TargetsType","Teamtailor","Teamwork","TechnicalIndicatorType","Tempo","Teradata","TestDestination","TestDestinationType","Testrail","TextSplitter","TheGuardianAPI","Thinkific","Throttled","Ticketmaster","Tickettailor","TiktokMarketing","TiktokMarketingCredentials","TimeAggregates","TimeFrame","TimeGranularity","TimeGranularityType","TimeInterval","TimePeriod","Timely","Timeplus","Tinyemail","ToValue","Todoist","TopHeadlinesTopic","TrackPms","TransformationQueryRunType","Trello","Tremendous","Trustpilot","TunnelMethod","TvmazeSchedule","TwelveData","Twilio","TwilioTaskrouter","Twitter","Typeform","TypeformCredentials","Typesense","URLBase","URLRegion","Ubidots","Unencrypted","UnitOfMeasure","Units","Unleash","UnstructuredDocumentFormat","UpdateMethod","UploadingMethod","Uppromote","UsCensus","UserProvided","UserResponse","UsernameAndPassword","UsernamePassword","UsersResponse","Uservoice","ValidActionBreakdowns","ValidAdSetStatuses","ValidAdStatuses","ValidBreakdowns","ValidCampaignStatuses","ValidationPolicy","Validenums","Value","ValueType","Vantage","Vectara","Veeqo","Vercel","VerifyCa","VerifyFull","VerifyIdentity","ViaAPI","ViewAttributionWindow","ViewWindowDays","VismaEconomic","Vitally","Vwo","Waiteraid","WasabiStatsAPI","Weatherstack","Weaviate","WebScrapper","Webflow","WhenIWork","WhiskyHunter","WikipediaPageviews","Woocommerce","Wordpress","Workable","Workflowmax","Workramp","WorkspaceCreateRequest","WorkspaceOAuthCredentialsRequest","WorkspaceResponse","WorkspaceUpdateRequest","WorkspacesResponse","Wrike","Wufoo","Xkcd","Xsolla","Xz","YahooFinancePrice","YandexMetrica","Yellowbrick","Yotpo","YouNeedABudgetYnab","YoutubeAnalytics","YoutubeAnalyticsCredentials","YoutubeData","ZapierSupportedStorage","ZendeskChat","ZendeskChatCredentials","ZendeskSunshine","ZendeskSupport","ZendeskSupportCredentials","ZendeskTalk","ZendeskTalkCredentials","Zenefits","Zenloop","ZohoAnalyticsMetadataAPI","ZohoBigin","ZohoBilling","ZohoBooks","ZohoCRMEdition","ZohoCampaign","ZohoCrm","ZohoDesk","ZohoExpense","ZohoInventory","ZohoInvoice","ZonkaFeedback","Zoom","Zstandard"] diff --git a/src/airbyte_api/models/airtable.py b/src/airbyte_api/models/airtable.py index 03857d7a..be33616f 100644 --- a/src/airbyte_api/models/airtable.py +++ b/src/airbyte_api/models/airtable.py @@ -13,7 +13,7 @@ class Credentials: client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id'), 'exclude': lambda f: f is None }}) r"""The client ID of the Airtable developer application.""" client_secret: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret'), 'exclude': lambda f: f is None }}) - r"""The client secret the Airtable developer application.""" + r"""The client secret of the Airtable developer application.""" diff --git a/src/airbyte_api/models/amazon_seller_partner.py 
b/src/airbyte_api/models/amazon_seller_partner.py index 0ab1d30f..07bb1ed4 100644 --- a/src/airbyte_api/models/amazon_seller_partner.py +++ b/src/airbyte_api/models/amazon_seller_partner.py @@ -10,6 +10,8 @@ @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class AmazonSellerPartner: + app_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('app_id'), 'exclude': lambda f: f is None }}) + r"""Your Amazon Application ID.""" lwa_app_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lwa_app_id'), 'exclude': lambda f: f is None }}) r"""Your Login with Amazon Client ID.""" lwa_client_secret: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lwa_client_secret'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/configuredstreammapper.py b/src/airbyte_api/models/configuredstreammapper.py index 7c733ea2..9bee54b0 100644 --- a/src/airbyte_api/models/configuredstreammapper.py +++ b/src/airbyte_api/models/configuredstreammapper.py @@ -6,6 +6,7 @@ from .streammappertype import StreamMapperType from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json +from typing import Optional @dataclass_json(undefined=Undefined.EXCLUDE) @@ -14,5 +15,6 @@ class ConfiguredStreamMapper: mapper_configuration: MapperConfiguration = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mapperConfiguration') }}) r"""The values required to configure the mapper.""" type: StreamMapperType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('type') }}) + id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/connectioncreaterequest.py b/src/airbyte_api/models/connectioncreaterequest.py index 09119c93..663f9652 100644 --- a/src/airbyte_api/models/connectioncreaterequest.py +++ b/src/airbyte_api/models/connectioncreaterequest.py @@ -4,13 +4,13 @@ import dataclasses from .airbyteapiconnectionschedule import AirbyteAPIConnectionSchedule from .connectionstatusenum import ConnectionStatusEnum -from .geographyenum import GeographyEnum from .namespacedefinitionenum import NamespaceDefinitionEnum from .nonbreakingschemaupdatesbehaviorenum import NonBreakingSchemaUpdatesBehaviorEnum -from .streamconfigurations import StreamConfigurations +from .streamconfigurations_input import StreamConfigurationsInput +from .tag import Tag from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json -from typing import Optional +from typing import List, Optional @dataclass_json(undefined=Undefined.EXCLUDE) @@ -18,9 +18,10 @@ class ConnectionCreateRequest: destination_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationId') }}) source_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceId') }}) - configurations: Optional[StreamConfigurations] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('configurations'), 'exclude': lambda f: f is None }}) + configurations: Optional[StreamConfigurationsInput] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': 
utils.get_field_name('configurations'), 'exclude': lambda f: f is None }}) r"""A list of configured stream options for a connection.""" - data_residency: Optional[GeographyEnum] = dataclasses.field(default=GeographyEnum.AUTO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataResidency'), 'exclude': lambda f: f is None }}) + data_residency: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataResidency'), 'exclude': lambda f: f is None }}) + r"""Deprecated field: We no longer support modifying dataResidency on Community and Enterprise connections. All connections will use the dataResidency of their associated workspace.""" name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name'), 'exclude': lambda f: f is None }}) r"""Optional name of the connection""" namespace_definition: Optional[NamespaceDefinitionEnum] = dataclasses.field(default=NamespaceDefinitionEnum.DESTINATION, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('namespaceDefinition'), 'exclude': lambda f: f is None }}) @@ -29,10 +30,11 @@ class ConnectionCreateRequest: r"""Used when namespaceDefinition is 'custom_format'. If blank then behaves like namespaceDefinition = 'destination'. If \\"${SOURCE_NAMESPACE}\\" then behaves like namespaceDefinition = 'source'.""" non_breaking_schema_updates_behavior: Optional[NonBreakingSchemaUpdatesBehaviorEnum] = dataclasses.field(default=NonBreakingSchemaUpdatesBehaviorEnum.IGNORE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('nonBreakingSchemaUpdatesBehavior'), 'exclude': lambda f: f is None }}) r"""Set how Airbyte handles syncs when it detects a non-breaking schema change in the source""" - prefix: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('prefix'), 'exclude': lambda f: f is None }}) + prefix: Optional[str] = dataclasses.field(default='', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('prefix'), 'exclude': lambda f: f is None }}) r"""Prefix that will be prepended to the name of each stream when it is written to the destination (ex.
“airbyte_” causes “projects” => “airbyte_projects”).""" schedule: Optional[AirbyteAPIConnectionSchedule] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schedule'), 'exclude': lambda f: f is None }}) r"""schedule for when the connection should run, per the schedule type""" status: Optional[ConnectionStatusEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('status'), 'exclude': lambda f: f is None }}) + tags: Optional[List[Tag]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tags'), 'exclude': lambda f: f is None }})
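
Taken together, the ConnectionCreateRequest hunks above add taggable connections and demote dataResidency to an ignored, deprecated string. A minimal usage sketch against the new shape (IDs are hypothetical placeholders; Tag's own fields live in models/tag.py, outside this diff):

from airbyte_api.models.connectioncreaterequest import ConnectionCreateRequest

req = ConnectionCreateRequest(
    source_id="9924bcd0-99be-453d-ba47-c2c9766f7da5",      # hypothetical UUIDs
    destination_id="744cc0ed-7f05-4949-9e60-2a814f90c035",
    name="orders-sync",
    prefix="airbyte_",  # the default is now '' rather than None
    tags=[],            # new field: a list of models.Tag attached to the connection
    # data_residency is deprecated; the workspace's residency always applies
)
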
diff --git a/src/airbyte_api/models/connectionpatchrequest.py b/src/airbyte_api/models/connectionpatchrequest.py index ee3c5baf..7d80233d 100644 --- a/src/airbyte_api/models/connectionpatchrequest.py +++ b/src/airbyte_api/models/connectionpatchrequest.py @@ -4,21 +4,22 @@ import dataclasses from .airbyteapiconnectionschedule import AirbyteAPIConnectionSchedule from .connectionstatusenum import ConnectionStatusEnum -from .geographyenumnodefault import GeographyEnumNoDefault from .namespacedefinitionenumnodefault import NamespaceDefinitionEnumNoDefault from .nonbreakingschemaupdatesbehaviorenumnodefault import NonBreakingSchemaUpdatesBehaviorEnumNoDefault -from .streamconfigurations import StreamConfigurations +from .streamconfigurations_input import StreamConfigurationsInput +from .tag import Tag from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json -from typing import Optional +from typing import List, Optional @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class ConnectionPatchRequest: - configurations: Optional[StreamConfigurations] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('configurations'), 'exclude': lambda f: f is None }}) + configurations: Optional[StreamConfigurationsInput] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('configurations'), 'exclude': lambda f: f is None }}) r"""A list of configured stream options for a connection.""" - data_residency: Optional[GeographyEnumNoDefault] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataResidency'), 'exclude': lambda f: f is None }}) + data_residency: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataResidency'), 'exclude': lambda f: f is None }}) + r"""Deprecated field: We no longer support modifying dataResidency on Community and Enterprise connections.
All connections will use the dataResidency of their associated workspace.""" name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name'), 'exclude': lambda f: f is None }}) r"""Optional name of the connection""" namespace_definition: Optional[NamespaceDefinitionEnumNoDefault] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('namespaceDefinition'), 'exclude': lambda f: f is None }}) @@ -32,5 +33,6 @@ class ConnectionPatchRequest: schedule: Optional[AirbyteAPIConnectionSchedule] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schedule'), 'exclude': lambda f: f is None }}) r"""schedule for when the connection should run, per the schedule type""" status: Optional[ConnectionStatusEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('status'), 'exclude': lambda f: f is None }}) + tags: Optional[List[Tag]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tags'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/connectionresponse.py b/src/airbyte_api/models/connectionresponse.py index f6999dc7..f9bcac74 100644 --- a/src/airbyte_api/models/connectionresponse.py +++ b/src/airbyte_api/models/connectionresponse.py @@ -4,13 +4,13 @@ import dataclasses from .connectionscheduleresponse import ConnectionScheduleResponse from .connectionstatusenum import ConnectionStatusEnum -from .geographyenum import GeographyEnum from .namespacedefinitionenum import NamespaceDefinitionEnum from .nonbreakingschemaupdatesbehaviorenum import NonBreakingSchemaUpdatesBehaviorEnum from .streamconfigurations import StreamConfigurations +from .tag import Tag from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json -from typing import Optional +from typing import List, Optional @dataclass_json(undefined=Undefined.EXCLUDE) @@ -27,8 +27,8 @@ class ConnectionResponse: r"""schedule for when the connection should run, per the schedule type""" source_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceId') }}) status: ConnectionStatusEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('status') }}) + tags: List[Tag] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tags') }}) workspace_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workspaceId') }}) - data_residency: Optional[GeographyEnum] = dataclasses.field(default=GeographyEnum.AUTO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataResidency'), 'exclude': lambda f: f is None }}) namespace_definition: Optional[NamespaceDefinitionEnum] = dataclasses.field(default=NamespaceDefinitionEnum.DESTINATION, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('namespaceDefinition'), 'exclude': lambda f: f is None }}) r"""Define the location where the data will be stored in the destination""" namespace_format: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('namespaceFormat'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/connectionsyncmodeenum.py b/src/airbyte_api/models/connectionsyncmodeenum.py index 137ba6b8..17d1053f 100644 ---
a/src/airbyte_api/models/connectionsyncmodeenum.py +++ b/src/airbyte_api/models/connectionsyncmodeenum.py @@ -6,6 +6,11 @@ class ConnectionSyncModeEnum(str, Enum): FULL_REFRESH_OVERWRITE = 'full_refresh_overwrite' + FULL_REFRESH_OVERWRITE_DEDUPED = 'full_refresh_overwrite_deduped' FULL_REFRESH_APPEND = 'full_refresh_append' + FULL_REFRESH_UPDATE = 'full_refresh_update' + FULL_REFRESH_SOFT_DELETE = 'full_refresh_soft_delete' INCREMENTAL_APPEND = 'incremental_append' INCREMENTAL_DEDUPED_HISTORY = 'incremental_deduped_history' + INCREMENTAL_UPDATE = 'incremental_update' + INCREMENTAL_SOFT_DELETE = 'incremental_soft_delete' diff --git a/src/airbyte_api/models/createdeclarativesourcedefinitionrequest.py b/src/airbyte_api/models/createdeclarativesourcedefinitionrequest.py new file mode 100644 index 00000000..f888a3a4 --- /dev/null +++ b/src/airbyte_api/models/createdeclarativesourcedefinitionrequest.py @@ -0,0 +1,17 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from typing import Any + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class CreateDeclarativeSourceDefinitionRequest: + manifest: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('manifest') }}) + r"""Low code CDK manifest JSON object""" + name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }}) + + diff --git a/src/airbyte_api/models/createdefinitionrequest.py b/src/airbyte_api/models/createdefinitionrequest.py new file mode 100644 index 00000000..b58ab1c0 --- /dev/null +++ b/src/airbyte_api/models/createdefinitionrequest.py @@ -0,0 +1,18 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from typing import Optional + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class CreateDefinitionRequest: + docker_image_tag: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dockerImageTag') }}) + docker_repository: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dockerRepository') }}) + name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }}) + documentation_url: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('documentationUrl'), 'exclude': lambda f: f is None }}) + + diff --git a/src/airbyte_api/models/declarativesourcedefinitionresponse.py b/src/airbyte_api/models/declarativesourcedefinitionresponse.py new file mode 100644 index 00000000..3e99cddf --- /dev/null +++ b/src/airbyte_api/models/declarativesourcedefinitionresponse.py @@ -0,0 +1,19 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
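
The new request models above are plain dataclasses, so building payloads for the definition endpoints is direct. A sketch with placeholder values (the manifest stub is illustrative, not a complete low-code manifest):

from airbyte_api.models.connectionsyncmodeenum import ConnectionSyncModeEnum
from airbyte_api.models.createdeclarativesourcedefinitionrequest import CreateDeclarativeSourceDefinitionRequest
from airbyte_api.models.createdefinitionrequest import CreateDefinitionRequest

declarative = CreateDeclarativeSourceDefinitionRequest(
    name="my-http-source",
    manifest={"type": "DeclarativeSource"},  # manifest is typed Any: any JSON-like object
)

docker_based = CreateDefinitionRequest(
    name="source-internal",
    docker_repository="example/source-internal",  # hypothetical image
    docker_image_tag="0.1.0",
)

# The widened sync-mode enum is referenced as usual:
mode = ConnectionSyncModeEnum.INCREMENTAL_SOFT_DELETE
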
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from typing import Any + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DeclarativeSourceDefinitionResponse: + id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id') }}) + manifest: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('manifest') }}) + r"""Low code CDK manifest JSON object""" + name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }}) + version: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('version') }}) + + diff --git a/src/airbyte_api/models/declarativesourcedefinitionsresponse.py b/src/airbyte_api/models/declarativesourcedefinitionsresponse.py new file mode 100644 index 00000000..43888443 --- /dev/null +++ b/src/airbyte_api/models/declarativesourcedefinitionsresponse.py @@ -0,0 +1,18 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from .declarativesourcedefinitionresponse import DeclarativeSourceDefinitionResponse +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from typing import List, Optional + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DeclarativeSourceDefinitionsResponse: + data: List[DeclarativeSourceDefinitionResponse] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data') }}) + next: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('next'), 'exclude': lambda f: f is None }}) + previous: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('previous'), 'exclude': lambda f: f is None }}) + + diff --git a/src/airbyte_api/models/definitionresponse.py b/src/airbyte_api/models/definitionresponse.py new file mode 100644 index 00000000..462948a6 --- /dev/null +++ b/src/airbyte_api/models/definitionresponse.py @@ -0,0 +1,20 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from typing import Optional + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DefinitionResponse: + r"""Provides details of a single connector definition.""" + docker_image_tag: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dockerImageTag') }}) + docker_repository: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dockerRepository') }}) + id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id') }}) + name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }}) + documentation_url: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('documentationUrl'), 'exclude': lambda f: f is None }}) + + diff --git a/src/airbyte_api/models/definitionsresponse.py b/src/airbyte_api/models/definitionsresponse.py new file mode 100644 index 00000000..54773055 --- /dev/null +++ b/src/airbyte_api/models/definitionsresponse.py @@ -0,0 +1,18 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from .definitionresponse import DefinitionResponse +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from typing import List, Optional + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DefinitionsResponse: + data: List[DefinitionResponse] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data') }}) + next: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('next'), 'exclude': lambda f: f is None }}) + previous: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('previous'), 'exclude': lambda f: f is None }}) + + diff --git a/src/airbyte_api/models/destination_astra.py b/src/airbyte_api/models/destination_astra.py index af598291..529ca831 100644 --- a/src/airbyte_api/models/destination_astra.py +++ b/src/airbyte_api/models/destination_astra.py @@ -12,7 +12,7 @@ class Astra(str, Enum): ASTRA = 'astra' -class DestinationAstraSchemasEmbeddingEmbeddingMode(str, Enum): +class DestinationAstraSchemasEmbeddingEmbedding5Mode(str, Enum): OPENAI_COMPATIBLE = 'openai_compatible' @@ -25,14 +25,14 @@ class OpenAICompatible: dimensions: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dimensions') }}) r"""The number of dimensions the embedding model is generating""" api_key: Optional[str] = dataclasses.field(default='', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key'), 'exclude': lambda f: f is None }}) - MODE: Final[Optional[DestinationAstraSchemasEmbeddingEmbeddingMode]] = dataclasses.field(default=DestinationAstraSchemasEmbeddingEmbeddingMode.OPENAI_COMPATIBLE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) + MODE: Final[Optional[DestinationAstraSchemasEmbeddingEmbedding5Mode]] = dataclasses.field(default=DestinationAstraSchemasEmbeddingEmbedding5Mode.OPENAI_COMPATIBLE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 
'exclude': lambda f: f is None }}) model_name: Optional[str] = dataclasses.field(default='text-embedding-ada-002', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('model_name'), 'exclude': lambda f: f is None }}) r"""The name of the model to use for embedding""" -class DestinationAstraSchemasEmbeddingMode(str, Enum): +class DestinationAstraSchemasEmbeddingEmbeddingMode(str, Enum): AZURE_OPENAI = 'azure_openai' @@ -46,12 +46,12 @@ class AzureOpenAI: r"""The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource""" openai_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('openai_key') }}) r"""The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource""" - MODE: Final[Optional[DestinationAstraSchemasEmbeddingMode]] = dataclasses.field(default=DestinationAstraSchemasEmbeddingMode.AZURE_OPENAI, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) + MODE: Final[Optional[DestinationAstraSchemasEmbeddingEmbeddingMode]] = dataclasses.field(default=DestinationAstraSchemasEmbeddingEmbeddingMode.AZURE_OPENAI, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) -class DestinationAstraSchemasMode(str, Enum): +class DestinationAstraSchemasEmbeddingMode(str, Enum): FAKE = 'fake' @@ -59,12 +59,12 @@ class DestinationAstraSchemasMode(str, Enum): @dataclasses.dataclass class Fake: r"""Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.""" - MODE: Final[Optional[DestinationAstraSchemasMode]] = dataclasses.field(default=DestinationAstraSchemasMode.FAKE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) + MODE: Final[Optional[DestinationAstraSchemasEmbeddingMode]] = dataclasses.field(default=DestinationAstraSchemasEmbeddingMode.FAKE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) -class DestinationAstraMode(str, Enum): +class DestinationAstraSchemasMode(str, Enum): COHERE = 'cohere' @@ -73,12 +73,12 @@ class DestinationAstraMode(str, Enum): class Cohere: r"""Use the Cohere API to embed text.""" cohere_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cohere_key') }}) - MODE: Final[Optional[DestinationAstraMode]] = dataclasses.field(default=DestinationAstraMode.COHERE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) + MODE: Final[Optional[DestinationAstraSchemasMode]] = dataclasses.field(default=DestinationAstraSchemasMode.COHERE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) -class DestinationAstraSchemasEmbeddingEmbedding1Mode(str, Enum): +class DestinationAstraMode(str, Enum): OPENAI = 'openai' @@ -87,7 +87,7 @@ class DestinationAstraSchemasEmbeddingEmbedding1Mode(str, Enum): class OpenAI: r"""Use the OpenAI API to embed text. 
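
The Astra hunks above and below only rotate generated enum names; the wire values and the wrapper classes (OpenAICompatible, AzureOpenAI, Fake, Cohere, OpenAI) are untouched. Code importing the enums by name needs a mechanical rename:

# Old enum name                                  -> new enum name (mode value)
# DestinationAstraSchemasEmbeddingEmbeddingMode  -> DestinationAstraSchemasEmbeddingEmbedding5Mode  (openai_compatible)
# DestinationAstraSchemasEmbeddingMode           -> DestinationAstraSchemasEmbeddingEmbeddingMode   (azure_openai)
# DestinationAstraSchemasMode                    -> DestinationAstraSchemasEmbeddingMode            (fake)
# DestinationAstraMode                           -> DestinationAstraSchemasMode                     (cohere)
# DestinationAstraSchemasEmbeddingEmbedding1Mode -> DestinationAstraMode                            (openai)
# Callers that only construct the wrapper classes are unaffected, since each
# MODE field keeps its default.
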
This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.""" openai_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('openai_key') }}) - MODE: Final[Optional[DestinationAstraSchemasEmbeddingEmbedding1Mode]] = dataclasses.field(default=DestinationAstraSchemasEmbeddingEmbedding1Mode.OPENAI, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) + MODE: Final[Optional[DestinationAstraMode]] = dataclasses.field(default=DestinationAstraMode.OPENAI, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/destination_azure_blob_storage.py b/src/airbyte_api/models/destination_azure_blob_storage.py index b5814290..e307426d 100644 --- a/src/airbyte_api/models/destination_azure_blob_storage.py +++ b/src/airbyte_api/models/destination_azure_blob_storage.py @@ -5,13 +5,18 @@ from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json from enum import Enum -from typing import Final, Optional, Union +from typing import Any, Dict, Final, Optional, Union class DestinationAzureBlobStorageAzureBlobStorage(str, Enum): AZURE_BLOB_STORAGE = 'azure-blob-storage' +class DestinationAzureBlobStorageFlattening(str, Enum): + NO_FLATTENING = 'No flattening' + ROOT_LEVEL_FLATTENING = 'Root level flattening' + + class DestinationAzureBlobStorageFormatType(str, Enum): JSONL = 'JSONL' @@ -19,15 +24,14 @@ class DestinationAzureBlobStorageFormatType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON: - file_extension: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('file_extension'), 'exclude': lambda f: f is None }}) - r"""Add file extensions to the output file.""" - FORMAT_TYPE: Final[DestinationAzureBlobStorageFormatType] = dataclasses.field(default=DestinationAzureBlobStorageFormatType.JSONL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }}) + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + flattening: Optional[DestinationAzureBlobStorageFlattening] = dataclasses.field(default=DestinationAzureBlobStorageFlattening.NO_FLATTENING, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flattening'), 'exclude': lambda f: f is None }}) + format_type: Optional[DestinationAzureBlobStorageFormatType] = dataclasses.field(default=DestinationAzureBlobStorageFormatType.JSONL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type'), 'exclude': lambda f: f is None }}) -class NormalizationFlattening(str, Enum): - r"""Whether the input json data should be normalized (flattened) in the output CSV. 
Please refer to docs for details.""" +class Flattening(str, Enum): NO_FLATTENING = 'No flattening' ROOT_LEVEL_FLATTENING = 'Root level flattening' @@ -39,11 +43,9 @@ class FormatType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class CSVCommaSeparatedValues: - file_extension: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('file_extension'), 'exclude': lambda f: f is None }}) - r"""Add file extensions to the output file.""" - flattening: Optional[NormalizationFlattening] = dataclasses.field(default=NormalizationFlattening.NO_FLATTENING, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flattening'), 'exclude': lambda f: f is None }}) - r"""Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.""" - FORMAT_TYPE: Final[FormatType] = dataclasses.field(default=FormatType.CSV, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type') }}) + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + flattening: Optional[Flattening] = dataclasses.field(default=Flattening.NO_FLATTENING, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flattening'), 'exclude': lambda f: f is None }}) + format_type: Optional[FormatType] = dataclasses.field(default=FormatType.CSV, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type'), 'exclude': lambda f: f is None }}) @@ -51,21 +53,21 @@ class CSVCommaSeparatedValues: @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationAzureBlobStorage: - azure_blob_storage_account_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_account_key') }}) - r"""The Azure blob storage account key.""" azure_blob_storage_account_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_account_name') }}) - r"""The account's name of the Azure Blob Storage.""" + r"""The name of the Azure Blob Storage Account. Read more here.""" + azure_blob_storage_container_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_container_name') }}) + r"""The name of the Azure Blob Storage Container. Read more here.""" format: OutputFormat = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format') }}) - r"""Output data format""" - azure_blob_storage_container_name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_container_name'), 'exclude': lambda f: f is None }}) - r"""The name of the Azure blob storage container. If not exists - will be created automatically. 
May be empty, then will be created automatically airbytecontainer+timestamp""" - azure_blob_storage_endpoint_domain_name: Optional[str] = dataclasses.field(default='blob.core.windows.net', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_endpoint_domain_name'), 'exclude': lambda f: f is None }}) + r"""Format of the data output.""" + azure_blob_storage_account_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_account_key'), 'exclude': lambda f: f is None }}) + r"""The Azure blob storage account key. If you set this value, you must not set the Shared Access Signature.""" + azure_blob_storage_endpoint_domain_name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_endpoint_domain_name'), 'exclude': lambda f: f is None }}) r"""This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.""" - azure_blob_storage_output_buffer_size: Optional[int] = dataclasses.field(default=5, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_output_buffer_size'), 'exclude': lambda f: f is None }}) - r"""The amount of megabytes to buffer for the output stream to Azure. This will impact memory footprint on workers, but may need adjustment for performance and appropriate block size in Azure.""" azure_blob_storage_spill_size: Optional[int] = dataclasses.field(default=500, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_spill_size'), 'exclude': lambda f: f is None }}) - r"""The amount of megabytes after which the connector should spill the records in a new blob object. Make sure to configure size greater than individual records. Enter 0 if not applicable""" + r"""The amount of megabytes after which the connector should spill the records in a new blob object. Make sure to configure size greater than individual records. Enter 0 if not applicable.""" DESTINATION_TYPE: Final[DestinationAzureBlobStorageAzureBlobStorage] = dataclasses.field(default=DestinationAzureBlobStorageAzureBlobStorage.AZURE_BLOB_STORAGE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) + shared_access_signature: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('shared_access_signature'), 'exclude': lambda f: f is None }}) + r"""A shared access signature (SAS) provides secure delegated access to resources in your storage account. Read more here. If you set this value, you must not set the account key.""" diff --git a/src/airbyte_api/models/destination_bigquery.py b/src/airbyte_api/models/destination_bigquery.py index f771596c..4ace65d7 100644 --- a/src/airbyte_api/models/destination_bigquery.py +++ b/src/airbyte_api/models/destination_bigquery.py @@ -5,13 +5,20 @@ from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json from enum import Enum -from typing import Final, Optional, Union +from typing import Any, Dict, Final, Optional, Union + + +class CDCDeletionMode(str, Enum): + r"""Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination). 
diff --git a/src/airbyte_api/models/destination_bigquery.py b/src/airbyte_api/models/destination_bigquery.py index f771596c..4ace65d7 100644 --- a/src/airbyte_api/models/destination_bigquery.py +++ b/src/airbyte_api/models/destination_bigquery.py @@ -5,13 +5,20 @@ from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json from enum import Enum -from typing import Final, Optional, Union +from typing import Any, Dict, Final, Optional, Union + + +class CDCDeletionMode(str, Enum): + r"""Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination).
Defaults to hard deletes.""" + HARD_DELETE = 'Hard delete' + SOFT_DELETE = 'Soft delete' class DatasetLocation(str, Enum): r"""The location of the dataset. Warning: Changes made after creation will not be applied. Read more here.""" - US = 'US' EU = 'EU' + US = 'US' + AFRICA_SOUTH1 = 'africa-south1' ASIA_EAST1 = 'asia-east1' ASIA_EAST2 = 'asia-east2' ASIA_NORTHEAST1 = 'asia-northeast1' @@ -23,30 +30,29 @@ ASIA_SOUTHEAST2 = 'asia-southeast2' AUSTRALIA_SOUTHEAST1 = 'australia-southeast1' AUSTRALIA_SOUTHEAST2 = 'australia-southeast2' - EUROPE_CENTRAL1 = 'europe-central1' EUROPE_CENTRAL2 = 'europe-central2' EUROPE_NORTH1 = 'europe-north1' + EUROPE_NORTH2 = 'europe-north2' EUROPE_SOUTHWEST1 = 'europe-southwest1' EUROPE_WEST1 = 'europe-west1' EUROPE_WEST2 = 'europe-west2' EUROPE_WEST3 = 'europe-west3' EUROPE_WEST4 = 'europe-west4' EUROPE_WEST6 = 'europe-west6' - EUROPE_WEST7 = 'europe-west7' EUROPE_WEST8 = 'europe-west8' EUROPE_WEST9 = 'europe-west9' + EUROPE_WEST10 = 'europe-west10' EUROPE_WEST12 = 'europe-west12' ME_CENTRAL1 = 'me-central1' ME_CENTRAL2 = 'me-central2' ME_WEST1 = 'me-west1' NORTHAMERICA_NORTHEAST1 = 'northamerica-northeast1' NORTHAMERICA_NORTHEAST2 = 'northamerica-northeast2' + NORTHAMERICA_SOUTH1 = 'northamerica-south1' SOUTHAMERICA_EAST1 = 'southamerica-east1' SOUTHAMERICA_WEST1 = 'southamerica-west1' US_CENTRAL1 = 'us-central1' US_EAST1 = 'us-east1' - US_EAST2 = 'us-east2' - US_EAST3 = 'us-east3' US_EAST4 = 'us-east4' US_EAST5 = 'us-east5' US_SOUTH1 = 'us-south1' @@ -71,12 +77,13 @@ class DestinationBigqueryHMACKey: r"""HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.""" hmac_key_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('hmac_key_secret') }}) r"""The corresponding secret for the access ID. It is a 40-character base-64 encoded string.""" - CREDENTIAL_TYPE: Final[DestinationBigqueryCredentialType] = dataclasses.field(default=DestinationBigqueryCredentialType.HMAC_KEY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credential_type') }}) + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + credential_type: Optional[DestinationBigqueryCredentialType] = dataclasses.field(default=DestinationBigqueryCredentialType.HMAC_KEY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credential_type'), 'exclude': lambda f: f is None }}) -class GCSTmpFilesAfterwardProcessing(str, Enum): +class GCSTmpFilesPostProcessing(str, Enum): r"""This upload method is supposed to temporarily store records in GCS bucket. By this select you can choose if these records should be removed from GCS when migration has finished. The default \\"Delete all tmp files from GCS\\" value is used if not set explicitly.""" DELETE_ALL_TMP_FILES_FROM_GCS = 'Delete all tmp files from GCS' KEEP_ALL_TMP_FILES_IN_GCS = 'Keep all tmp files in GCS' @@ -96,9 +103,10 @@ class GCSStaging: r"""The name of the GCS bucket.
Read more here.""" gcs_bucket_path: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('gcs_bucket_path') }}) r"""Directory under the GCS bucket where data will be written.""" - keep_files_in_gcs_bucket: Optional[GCSTmpFilesAfterwardProcessing] = dataclasses.field(default=GCSTmpFilesAfterwardProcessing.DELETE_ALL_TMP_FILES_FROM_GCS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('keep_files_in_gcs-bucket'), 'exclude': lambda f: f is None }}) + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + keep_files_in_gcs_bucket: Optional[GCSTmpFilesPostProcessing] = dataclasses.field(default=GCSTmpFilesPostProcessing.DELETE_ALL_TMP_FILES_FROM_GCS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('keep_files_in_gcs-bucket'), 'exclude': lambda f: f is None }}) r"""This upload method is supposed to temporarily store records in GCS bucket. By this select you can choose if these records should be removed from GCS when migration has finished. The default \\"Delete all tmp files from GCS\\" value is used if not set explicitly.""" - METHOD: Final[DestinationBigqueryMethod] = dataclasses.field(default=DestinationBigqueryMethod.GCS_STAGING, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }}) + method: Optional[DestinationBigqueryMethod] = dataclasses.field(default=DestinationBigqueryMethod.GCS_STAGING, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method'), 'exclude': lambda f: f is None }}) @@ -111,17 +119,12 @@ class Method(str, Enum): @dataclasses.dataclass class BatchedStandardInserts: r"""Direct loading using batched SQL INSERT statements. This method uses the BigQuery driver to convert large INSERT statements into file uploads automatically.""" - METHOD: Final[Method] = dataclasses.field(default=Method.STANDARD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }}) + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + method: Optional[Method] = dataclasses.field(default=Method.STANDARD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method'), 'exclude': lambda f: f is None }}) -class TransformationQueryRunType(str, Enum): - r"""Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default \\"interactive\\" value is used if not set explicitly.""" - INTERACTIVE = 'interactive' - BATCH = 'batch' - - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationBigquery: @@ -131,19 +134,17 @@ class DestinationBigquery: r"""The location of the dataset. Warning: Changes made after creation will not be applied. Read more here.""" project_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('project_id') }}) r"""The GCP project ID for the project containing the target BigQuery dataset.
Read more here.""" - big_query_client_buffer_size_mb: Optional[int] = dataclasses.field(default=15, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('big_query_client_buffer_size_mb'), 'exclude': lambda f: f is None }}) - r"""Google BigQuery client's chunk (buffer) size (MIN=1, MAX = 15) for each table. The size that will be written by a single RPC. Written data will be buffered and only flushed upon reaching this size or closing the channel. The default 15MB value is used if not set explicitly. Read more here.""" + cdc_deletion_mode: Optional[CDCDeletionMode] = dataclasses.field(default=CDCDeletionMode.HARD_DELETE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cdc_deletion_mode'), 'exclude': lambda f: f is None }}) + r"""Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes.""" credentials_json: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials_json'), 'exclude': lambda f: f is None }}) r"""The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.""" DESTINATION_TYPE: Final[Bigquery] = dataclasses.field(default=Bigquery.BIGQUERY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) disable_type_dedupe: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('disable_type_dedupe'), 'exclude': lambda f: f is None }}) - r"""Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions""" + r"""Write the legacy \\"raw tables\\" format, to enable backwards compatibility with older versions of this connector.""" loading_method: Optional[LoadingMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('loading_method'), 'exclude': lambda f: f is None }}) r"""The way data will be uploaded to BigQuery.""" raw_data_dataset: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('raw_data_dataset'), 'exclude': lambda f: f is None }}) - r"""The dataset to write raw tables into (default: airbyte_internal)""" - transformation_priority: Optional[TransformationQueryRunType] = dataclasses.field(default=TransformationQueryRunType.INTERACTIVE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('transformation_priority'), 'exclude': lambda f: f is None }}) - r"""Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default \\"interactive\\" value is used if not set explicitly.""" + r"""Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. 
Defaults to \\"airbyte_internal\\".""" diff --git a/src/airbyte_api/models/destination_clickhouse.py b/src/airbyte_api/models/destination_clickhouse.py index e9c5a522..4d9e8c69 100644 --- a/src/airbyte_api/models/destination_clickhouse.py +++ b/src/airbyte_api/models/destination_clickhouse.py @@ -5,29 +5,35 @@ from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json from enum import Enum -from typing import Final, Optional, Union +from typing import Any, Dict, Final, Optional, Union class Clickhouse(str, Enum): CLICKHOUSE = 'clickhouse' +class Protocol(str, Enum): + r"""Protocol for the database connection string.""" + HTTP = 'http' + HTTPS = 'https' + + class DestinationClickhouseSchemasTunnelMethod(str, Enum): - r"""Connect through a jump server tunnel host using username and password authentication""" SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH' @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class PasswordAuthentication: + r"""Connect through a jump server tunnel host using username and password authentication""" tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }}) r"""Hostname of the jump server host that allows inbound ssh tunnel.""" tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }}) r"""OS-level username for logging into the jump server host""" tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }}) r"""OS-level password for logging into the jump server host""" - TUNNEL_METHOD: Final[DestinationClickhouseSchemasTunnelMethod] = dataclasses.field(default=DestinationClickhouseSchemasTunnelMethod.SSH_PASSWORD_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }}) - r"""Connect through a jump server tunnel host using username and password authentication""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[DestinationClickhouseSchemasTunnelMethod] = dataclasses.field(default=DestinationClickhouseSchemasTunnelMethod.SSH_PASSWORD_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) tunnel_port: Optional[int] = dataclasses.field(default=22, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port'), 'exclude': lambda f: f is None }}) r"""Port on the proxy/jump server that accepts inbound ssh connections.""" @@ -35,21 +41,21 @@ class PasswordAuthentication: class DestinationClickhouseTunnelMethod(str, Enum): - r"""Connect through a jump server tunnel host using username and ssh key""" SSH_KEY_AUTH = 'SSH_KEY_AUTH' @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SSHKeyAuthentication: + r"""Connect through a jump server tunnel host using username and ssh key""" ssh_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssh_key') }}) r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )""" tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }}) r"""Hostname of the jump server host that allows inbound ssh tunnel.""" tunnel_user: str = 
dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }}) - r"""OS-level username for logging into the jump server host.""" - TUNNEL_METHOD: Final[DestinationClickhouseTunnelMethod] = dataclasses.field(default=DestinationClickhouseTunnelMethod.SSH_KEY_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }}) - r"""Connect through a jump server tunnel host using username and ssh key""" + r"""OS-level username for logging into the jump server host""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[DestinationClickhouseTunnelMethod] = dataclasses.field(default=DestinationClickhouseTunnelMethod.SSH_KEY_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) tunnel_port: Optional[int] = dataclasses.field(default=22, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port'), 'exclude': lambda f: f is None }}) r"""Port on the proxy/jump server that accepts inbound ssh connections.""" @@ -57,15 +63,15 @@ class SSHKeyAuthentication: class TunnelMethod(str, Enum): - r"""No ssh tunnel needed to connect to database""" NO_TUNNEL = 'NO_TUNNEL' @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class NoTunnel: - TUNNEL_METHOD: Final[TunnelMethod] = dataclasses.field(default=TunnelMethod.NO_TUNNEL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }}) r"""No ssh tunnel needed to connect to database""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[TunnelMethod] = dataclasses.field(default=TunnelMethod.NO_TUNNEL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) @@ -73,25 +79,25 @@ class NoTunnel: @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationClickhouse: - database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }}) - r"""Name of the database.""" host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }}) r"""Hostname of the database.""" - username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) - r"""Username to use to access the database.""" - DESTINATION_TYPE: Final[Clickhouse] = dataclasses.field(default=Clickhouse.CLICKHOUSE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) - jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }}) - r"""Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3).""" - password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) + password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }}) r"""Password associated with the username.""" - port: Optional[int] = dataclasses.field(default=8123, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port'), 'exclude': lambda f: f is None }}) - r"""HTTP port of the database.""" - raw_data_schema: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('raw_data_schema'), 'exclude': lambda f: f is None }}) - r"""The schema to write raw tables into (default: airbyte_internal)""" - ssl: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl'), 'exclude': lambda f: f is None }}) - r"""Encrypt data using SSL.""" + database: Optional[str] = dataclasses.field(default='default', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database'), 'exclude': lambda f: f is None }}) + r"""Name of the database.""" + DESTINATION_TYPE: Final[Clickhouse] = dataclasses.field(default=Clickhouse.CLICKHOUSE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) + enable_json: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('enable_json'), 'exclude': lambda f: f is None }}) + r"""Use the JSON type for Object fields. If disabled, the JSON will be converted to a string.""" + port: Optional[str] = dataclasses.field(default='8443', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port'), 'exclude': lambda f: f is None }}) + r"""HTTP port of the database. Defaults: HTTP 8123, HTTPS 8443.""" + protocol: Optional[Protocol] = dataclasses.field(default=Protocol.HTTPS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('protocol'), 'exclude': lambda f: f is None }}) + r"""Protocol for the database connection string.""" + record_window_size: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('record_window_size'), 'exclude': lambda f: f is None }}) + r"""Warning: tuning this parameter can impact performance. The maximum number of records to write in a single batch; each batch is additionally capped at 70 MB.""" tunnel_method: Optional[SSHTunnelMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" + username: Optional[str] = dataclasses.field(default='default', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username'), 'exclude': lambda f: f is None }}) + r"""Username to use to access the database.""" diff --git a/src/airbyte_api/models/destination_customer_io.py b/src/airbyte_api/models/destination_customer_io.py new file mode 100644 index 00000000..b0966b88 --- /dev/null +++ b/src/airbyte_api/models/destination_customer_io.py @@ -0,0 +1,116 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
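Taken together, the ClickHouse hunks above are a breaking schema change for SDK callers: password becomes required, database and username become optional with a 'default' fallback, port changes from an integer (8123) to a string ('8443'), and the new Protocol enum selects the connection scheme. A minimal sketch of the new shape, with hypothetical values throughout, assuming these models are re-exported through airbyte_api.models as elsewhere in this SDK:

```python
from airbyte_api import models

# Hypothetical values throughout; only the field names and types come from
# the DestinationClickhouse model in this diff.
config = models.DestinationClickhouse(
    host='clickhouse.example.com',
    password='REPLACE_ME',            # now required
    protocol=models.Protocol.HTTPS,   # new enum; HTTPS is the default
    port='8443',                      # now a string: '8123' for HTTP, '8443' for HTTPS
    database='analytics',             # optional, falls back to 'default'
    enable_json=True,                 # new flag: keep Object fields as JSON
)
```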
+ +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Any, Dict, Final, Optional, Union + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DestinationCustomerIoCredentials: + r"""Enter the site ID and API key to authenticate.""" + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('apiKey') }}) + r"""Enter your Customer IO API Key.""" + site_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('siteId') }}) + r"""Enter your Customer IO Site ID.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + + + + +class CustomerIo(str, Enum): + CUSTOMER_IO = 'customer-io' + + +class DestinationCustomerIoS3BucketRegion(str, Enum): + r"""The region of the S3 bucket. See here for all region codes.""" + UNKNOWN = '' + AF_SOUTH_1 = 'af-south-1' + AP_EAST_1 = 'ap-east-1' + AP_NORTHEAST_1 = 'ap-northeast-1' + AP_NORTHEAST_2 = 'ap-northeast-2' + AP_NORTHEAST_3 = 'ap-northeast-3' + AP_SOUTH_1 = 'ap-south-1' + AP_SOUTH_2 = 'ap-south-2' + AP_SOUTHEAST_1 = 'ap-southeast-1' + AP_SOUTHEAST_2 = 'ap-southeast-2' + AP_SOUTHEAST_3 = 'ap-southeast-3' + AP_SOUTHEAST_4 = 'ap-southeast-4' + CA_CENTRAL_1 = 'ca-central-1' + CA_WEST_1 = 'ca-west-1' + CN_NORTH_1 = 'cn-north-1' + CN_NORTHWEST_1 = 'cn-northwest-1' + EU_CENTRAL_1 = 'eu-central-1' + EU_CENTRAL_2 = 'eu-central-2' + EU_NORTH_1 = 'eu-north-1' + EU_SOUTH_1 = 'eu-south-1' + EU_SOUTH_2 = 'eu-south-2' + EU_WEST_1 = 'eu-west-1' + EU_WEST_2 = 'eu-west-2' + EU_WEST_3 = 'eu-west-3' + IL_CENTRAL_1 = 'il-central-1' + ME_CENTRAL_1 = 'me-central-1' + ME_SOUTH_1 = 'me-south-1' + SA_EAST_1 = 'sa-east-1' + US_EAST_1 = 'us-east-1' + US_EAST_2 = 'us-east-2' + US_GOV_EAST_1 = 'us-gov-east-1' + US_GOV_WEST_1 = 'us-gov-west-1' + US_WEST_1 = 'us-west-1' + US_WEST_2 = 'us-west-2' + + +class DestinationCustomerIoStorageType(str, Enum): + S3 = 'S3' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DestinationCustomerIoS3: + bucket_path: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('bucket_path') }}) + r"""All files in the bucket will be prefixed by this.""" + s3_bucket_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_name') }}) + r"""The name of the S3 bucket. Read more here.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + access_key_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key_id'), 'exclude': lambda f: f is None }}) + r"""The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.""" + role_arn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('role_arn'), 'exclude': lambda f: f is None }}) + r"""The ARN of the AWS role to assume. 
Only usable in Airbyte Cloud.""" + s3_bucket_region: Optional[DestinationCustomerIoS3BucketRegion] = dataclasses.field(default=DestinationCustomerIoS3BucketRegion.UNKNOWN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_region'), 'exclude': lambda f: f is None }}) + r"""The region of the S3 bucket. See here for all region codes.""" + s3_endpoint: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_endpoint'), 'exclude': lambda f: f is None }}) + r"""Your S3 endpoint url. Read more here""" + secret_access_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret_access_key'), 'exclude': lambda f: f is None }}) + r"""The corresponding secret to the access key ID. Read more here""" + storage_type: Optional[DestinationCustomerIoStorageType] = dataclasses.field(default=DestinationCustomerIoStorageType.S3, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage_type'), 'exclude': lambda f: f is None }}) + + + + +class StorageType(str, Enum): + NONE = 'None' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class NoneT: + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + storage_type: Optional[StorageType] = dataclasses.field(default=StorageType.NONE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage_type'), 'exclude': lambda f: f is None }}) + + + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DestinationCustomerIo: + credentials: DestinationCustomerIoCredentials = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }}) + r"""Enter the site ID and API key to authenticate.""" + DESTINATION_TYPE: Final[CustomerIo] = dataclasses.field(default=CustomerIo.CUSTOMER_IO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) + object_storage_config: Optional[ObjectStorageConfiguration] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('object_storage_config'), 'exclude': lambda f: f is None }}) + + + +ObjectStorageConfiguration = Union[NoneT, DestinationCustomerIoS3] diff --git a/src/airbyte_api/models/destination_deepset.py b/src/airbyte_api/models/destination_deepset.py new file mode 100644 index 00000000..1db52138 --- /dev/null +++ b/src/airbyte_api/models/destination_deepset.py @@ -0,0 +1,28 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional + + +class Deepset(str, Enum): + DEEPSET = 'deepset' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DestinationDeepset: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Your deepset cloud API key""" + workspace: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workspace') }}) + r"""Name of workspace to which to sync the data.""" + base_url: Optional[str] = dataclasses.field(default='https://api.cloud.deepset.ai', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('base_url'), 'exclude': lambda f: f is None }}) + r"""URL of deepset Cloud API (e.g. https://api.cloud.deepset.ai, https://api.us.deepset.ai, etc). Defaults to https://api.cloud.deepset.ai.""" + DESTINATION_TYPE: Final[Deepset] = dataclasses.field(default=Deepset.DEEPSET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) + retries: Optional[float] = dataclasses.field(default=5, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('retries'), 'exclude': lambda f: f is None }}) + r"""Number of times to retry an action before giving up.""" + + diff --git a/src/airbyte_api/models/destination_elasticsearch.py b/src/airbyte_api/models/destination_elasticsearch.py index 8225280e..2a0c47f9 100644 --- a/src/airbyte_api/models/destination_elasticsearch.py +++ b/src/airbyte_api/models/destination_elasticsearch.py @@ -48,7 +48,7 @@ class DestinationElasticsearchMethod(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class NoneT: +class DestinationElasticsearchNone: r"""No authentication will be used""" METHOD: Final[DestinationElasticsearchMethod] = dataclasses.field(default=DestinationElasticsearchMethod.NONE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }}) @@ -127,6 +127,8 @@ class DestinationElasticsearch: ca_certificate: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ca_certificate'), 'exclude': lambda f: f is None }}) r"""CA certificate""" DESTINATION_TYPE: Final[Elasticsearch] = dataclasses.field(default=Elasticsearch.ELASTICSEARCH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) + path_prefix: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('pathPrefix'), 'exclude': lambda f: f is None }}) + r"""The Path Prefix of the Elasticsearch server""" tunnel_method: Optional[DestinationElasticsearchSSHTunnelMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" upsert: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('upsert'), 'exclude': lambda f: f is None }}) @@ -134,6 +136,6 @@ class DestinationElasticsearch: -AuthenticationMethod = Union[NoneT, APIKeySecret, UsernamePassword] +AuthenticationMethod = Union[DestinationElasticsearchNone, APIKeySecret, UsernamePassword] 
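For context on the union fix above: the Elasticsearch no-auth variant was renamed from NoneT to DestinationElasticsearchNone, which frees the bare NoneT name for the Customer.io storage option added earlier in this diff. Callers constructing the old class need the new name. A brief sketch, assuming the endpoint and authentication_method fields (which sit outside these hunks) are unchanged:

```python
from airbyte_api import models

# endpoint and authentication_method are assumed from the unchanged parts
# of the model; only the class rename and path_prefix come from this diff.
config = models.DestinationElasticsearch(
    endpoint='https://es.example.com:9200',
    authentication_method=models.DestinationElasticsearchNone(),  # formerly models.NoneT
    path_prefix='/airbyte',  # new optional field added above
)
```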
DestinationElasticsearchSSHTunnelMethod = Union[DestinationElasticsearchNoTunnel, DestinationElasticsearchSSHKeyAuthentication, DestinationElasticsearchPasswordAuthentication] diff --git a/src/airbyte_api/models/destination_google_sheets.py b/src/airbyte_api/models/destination_google_sheets.py index 7fa81030..84659f7a 100644 --- a/src/airbyte_api/models/destination_google_sheets.py +++ b/src/airbyte_api/models/destination_google_sheets.py @@ -5,19 +5,37 @@ from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json from enum import Enum -from typing import Final +from typing import Final, Optional, Union + + +class DestinationGoogleSheetsSchemasAuthType(str, Enum): + SERVICE = 'service' @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class AuthenticationViaGoogleOAuth: - r"""Google API Credentials for connecting to Google Sheets and Google Drive APIs""" +class ServiceAccountKeyAuthentication: + service_account_info: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('service_account_info') }}) + r"""Enter your service account key in JSON format. See the docs for more information on how to generate this key.""" + AUTH_TYPE: Final[Optional[DestinationGoogleSheetsSchemasAuthType]] = dataclasses.field(default=DestinationGoogleSheetsSchemasAuthType.SERVICE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) + + + + +class DestinationGoogleSheetsAuthType(str, Enum): + OAUTH2_0 = 'oauth2.0' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class AuthenticateViaGoogleOAuth: client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) r"""The Client ID of your Google Sheets developer application.""" client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) r"""The Client Secret of your Google Sheets developer application.""" refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }}) r"""The token for obtaining new access token.""" + AUTH_TYPE: Final[Optional[DestinationGoogleSheetsAuthType]] = dataclasses.field(default=DestinationGoogleSheetsAuthType.OAUTH2_0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) @@ -29,10 +47,12 @@ class DestinationGoogleSheetsGoogleSheets(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationGoogleSheets: - credentials: AuthenticationViaGoogleOAuth = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }}) - r"""Google API Credentials for connecting to Google Sheets and Google Drive APIs""" + credentials: DestinationGoogleSheetsAuthentication = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }}) + r"""Authentication method to access Google Sheets""" spreadsheet_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('spreadsheet_id') }}) r"""The link to your spreadsheet. 
See this guide for more details.""" DESTINATION_TYPE: Final[DestinationGoogleSheetsGoogleSheets] = dataclasses.field(default=DestinationGoogleSheetsGoogleSheets.GOOGLE_SHEETS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) + +DestinationGoogleSheetsAuthentication = Union[AuthenticateViaGoogleOAuth, ServiceAccountKeyAuthentication] diff --git a/src/airbyte_api/models/destination_hubspot.py b/src/airbyte_api/models/destination_hubspot.py new file mode 100644 index 00000000..554e6af8 --- /dev/null +++ b/src/airbyte_api/models/destination_hubspot.py @@ -0,0 +1,124 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Any, Dict, Final, Optional, Union + + +class Type(str, Enum): + O_AUTH = 'OAuth' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class OAuth: + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + r"""The Client ID of your HubSpot developer application. See the Hubspot docs if you need help finding this ID.""" + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + r"""The client secret for your HubSpot developer application. See the Hubspot docs if you need help finding this secret.""" + refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }}) + r"""Refresh token to renew an expired access token. See the Hubspot docs if you need help finding this token.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + type: Optional[Type] = dataclasses.field(default=Type.O_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('type'), 'exclude': lambda f: f is None }}) + + + + +class DestinationHubspotHubspot(str, Enum): + HUBSPOT = 'hubspot' + + +class DestinationHubspotS3BucketRegion(str, Enum): + r"""The region of the S3 bucket. 
See here for all region codes.""" + UNKNOWN = '' + AF_SOUTH_1 = 'af-south-1' + AP_EAST_1 = 'ap-east-1' + AP_NORTHEAST_1 = 'ap-northeast-1' + AP_NORTHEAST_2 = 'ap-northeast-2' + AP_NORTHEAST_3 = 'ap-northeast-3' + AP_SOUTH_1 = 'ap-south-1' + AP_SOUTH_2 = 'ap-south-2' + AP_SOUTHEAST_1 = 'ap-southeast-1' + AP_SOUTHEAST_2 = 'ap-southeast-2' + AP_SOUTHEAST_3 = 'ap-southeast-3' + AP_SOUTHEAST_4 = 'ap-southeast-4' + CA_CENTRAL_1 = 'ca-central-1' + CA_WEST_1 = 'ca-west-1' + CN_NORTH_1 = 'cn-north-1' + CN_NORTHWEST_1 = 'cn-northwest-1' + EU_CENTRAL_1 = 'eu-central-1' + EU_CENTRAL_2 = 'eu-central-2' + EU_NORTH_1 = 'eu-north-1' + EU_SOUTH_1 = 'eu-south-1' + EU_SOUTH_2 = 'eu-south-2' + EU_WEST_1 = 'eu-west-1' + EU_WEST_2 = 'eu-west-2' + EU_WEST_3 = 'eu-west-3' + IL_CENTRAL_1 = 'il-central-1' + ME_CENTRAL_1 = 'me-central-1' + ME_SOUTH_1 = 'me-south-1' + SA_EAST_1 = 'sa-east-1' + US_EAST_1 = 'us-east-1' + US_EAST_2 = 'us-east-2' + US_GOV_EAST_1 = 'us-gov-east-1' + US_GOV_WEST_1 = 'us-gov-west-1' + US_WEST_1 = 'us-west-1' + US_WEST_2 = 'us-west-2' + + +class DestinationHubspotSchemasStorageType(str, Enum): + S3 = 'S3' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DestinationHubspotS3: + bucket_path: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('bucket_path') }}) + r"""All files in the bucket will be prefixed by this.""" + s3_bucket_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_name') }}) + r"""The name of the S3 bucket. Read more here.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + access_key_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key_id'), 'exclude': lambda f: f is None }}) + r"""The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.""" + role_arn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('role_arn'), 'exclude': lambda f: f is None }}) + r"""The ARN of the AWS role to assume. Only usable in Airbyte Cloud.""" + s3_bucket_region: Optional[DestinationHubspotS3BucketRegion] = dataclasses.field(default=DestinationHubspotS3BucketRegion.UNKNOWN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_region'), 'exclude': lambda f: f is None }}) + r"""The region of the S3 bucket. See here for all region codes.""" + s3_endpoint: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_endpoint'), 'exclude': lambda f: f is None }}) + r"""Your S3 endpoint url. Read more here""" + secret_access_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret_access_key'), 'exclude': lambda f: f is None }}) + r"""The corresponding secret to the access key ID. 
Read more here""" + storage_type: Optional[DestinationHubspotSchemasStorageType] = dataclasses.field(default=DestinationHubspotSchemasStorageType.S3, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage_type'), 'exclude': lambda f: f is None }}) + + + + +class DestinationHubspotStorageType(str, Enum): + NONE = 'None' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DestinationHubspotNone: + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + storage_type: Optional[DestinationHubspotStorageType] = dataclasses.field(default=DestinationHubspotStorageType.NONE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage_type'), 'exclude': lambda f: f is None }}) + + + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DestinationHubspot: + credentials: DestinationHubspotCredentials = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }}) + r"""Choose how to authenticate to HubSpot.""" + DESTINATION_TYPE: Final[DestinationHubspotHubspot] = dataclasses.field(default=DestinationHubspotHubspot.HUBSPOT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) + object_storage_config: Optional[DestinationHubspotObjectStorageConfiguration] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('object_storage_config'), 'exclude': lambda f: f is None }}) + + + +DestinationHubspotCredentials = Union[OAuth] + +DestinationHubspotObjectStorageConfiguration = Union[DestinationHubspotNone, DestinationHubspotS3] diff --git a/src/airbyte_api/models/destination_iceberg.py b/src/airbyte_api/models/destination_iceberg.py deleted file mode 100644 index facaf362..00000000 --- a/src/airbyte_api/models/destination_iceberg.py +++ /dev/null @@ -1,214 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from airbyte_api import utils -from dataclasses_json import Undefined, dataclass_json -from enum import Enum -from typing import Final, Optional, Union - - -class DestinationIcebergSchemasCatalogConfigIcebergCatalogConfigCatalogType(str, Enum): - GLUE = 'Glue' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class GlueCatalog: - r"""The GlueCatalog connects to a AWS Glue Catalog""" - catalog_type: Optional[DestinationIcebergSchemasCatalogConfigIcebergCatalogConfigCatalogType] = dataclasses.field(default=DestinationIcebergSchemasCatalogConfigIcebergCatalogConfigCatalogType.GLUE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('catalog_type'), 'exclude': lambda f: f is None }}) - database: Optional[str] = dataclasses.field(default='public', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database'), 'exclude': lambda f: f is None }}) - r"""The default schema tables are written to if the source does not specify a namespace. 
The usual value for this field is \\"public\\".""" - - - - -class DestinationIcebergSchemasCatalogConfigCatalogType(str, Enum): - REST = 'Rest' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class RESTCatalog: - r"""The RESTCatalog connects to a REST server at the specified URI""" - rest_uri: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('rest_uri') }}) - catalog_type: Optional[DestinationIcebergSchemasCatalogConfigCatalogType] = dataclasses.field(default=DestinationIcebergSchemasCatalogConfigCatalogType.REST, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('catalog_type'), 'exclude': lambda f: f is None }}) - rest_credential: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('rest_credential'), 'exclude': lambda f: f is None }}) - rest_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('rest_token'), 'exclude': lambda f: f is None }}) - - - - -class DestinationIcebergSchemasCatalogType(str, Enum): - JDBC = 'Jdbc' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class JdbcCatalogUseRelationalDatabase: - r"""Using a table in a relational database to manage Iceberg tables through JDBC. Read more here. Supporting: PostgreSQL""" - catalog_schema: Optional[str] = dataclasses.field(default='public', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('catalog_schema'), 'exclude': lambda f: f is None }}) - r"""Iceberg catalog metadata tables are written to catalog schema. The usual value for this field is \\"public\\".""" - catalog_type: Optional[DestinationIcebergSchemasCatalogType] = dataclasses.field(default=DestinationIcebergSchemasCatalogType.JDBC, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('catalog_type'), 'exclude': lambda f: f is None }}) - database: Optional[str] = dataclasses.field(default='public', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database'), 'exclude': lambda f: f is None }}) - r"""The default schema tables are written to if the source does not specify a namespace. The usual value for this field is \\"public\\".""" - jdbc_url: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url'), 'exclude': lambda f: f is None }}) - password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) - r"""Password associated with the username.""" - ssl: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl'), 'exclude': lambda f: f is None }}) - r"""Encrypt data using SSL. 
When activating SSL, please select one of the connection modes.""" - username: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username'), 'exclude': lambda f: f is None }}) - r"""Username to use to access the database.""" - - - - -class DestinationIcebergCatalogType(str, Enum): - HADOOP = 'Hadoop' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class HadoopCatalogUseHierarchicalFileSystemsAsSameAsStorageConfig: - r"""A Hadoop catalog doesn’t need to connect to a Hive MetaStore, but can only be used with HDFS or similar file systems that support atomic rename.""" - catalog_type: Optional[DestinationIcebergCatalogType] = dataclasses.field(default=DestinationIcebergCatalogType.HADOOP, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('catalog_type'), 'exclude': lambda f: f is None }}) - database: Optional[str] = dataclasses.field(default='default', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database'), 'exclude': lambda f: f is None }}) - r"""The default database tables are written to if the source does not specify a namespace. The usual value for this field is \\"default\\".""" - - - - -class CatalogType(str, Enum): - HIVE = 'Hive' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class HiveCatalogUseApacheHiveMetaStore: - hive_thrift_uri: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('hive_thrift_uri') }}) - r"""Hive MetaStore thrift server uri of iceberg catalog.""" - catalog_type: Optional[CatalogType] = dataclasses.field(default=CatalogType.HIVE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('catalog_type'), 'exclude': lambda f: f is None }}) - database: Optional[str] = dataclasses.field(default='default', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database'), 'exclude': lambda f: f is None }}) - r"""The default database tables are written to if the source does not specify a namespace. The usual value for this field is \\"default\\".""" - - - - -class Iceberg(str, Enum): - ICEBERG = 'iceberg' - - -class FileStorageFormat(str, Enum): - PARQUET = 'Parquet' - AVRO = 'Avro' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class FileFormat: - r"""File format of Iceberg storage.""" - auto_compact: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auto_compact'), 'exclude': lambda f: f is None }}) - r"""Auto compact data files when stream close""" - compact_target_file_size_in_mb: Optional[int] = dataclasses.field(default=100, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compact_target_file_size_in_mb'), 'exclude': lambda f: f is None }}) - r"""Specify the target size of Iceberg data file when performing a compaction action.""" - flush_batch_size: Optional[int] = dataclasses.field(default=10000, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flush_batch_size'), 'exclude': lambda f: f is None }}) - r"""Iceberg data file flush batch size. 
Incoming rows write to cache firstly; When cache size reaches this 'batch size', flush into real Iceberg data file.""" - format: Optional[FileStorageFormat] = dataclasses.field(default=FileStorageFormat.PARQUET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format'), 'exclude': lambda f: f is None }}) - - - - -class DestinationIcebergStorageType(str, Enum): - MANAGED = 'MANAGED' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ServerManaged: - r"""Server-managed object storage""" - managed_warehouse_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('managed_warehouse_name') }}) - r"""The name of the managed warehouse""" - storage_type: Optional[DestinationIcebergStorageType] = dataclasses.field(default=DestinationIcebergStorageType.MANAGED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage_type'), 'exclude': lambda f: f is None }}) - - - - -class DestinationIcebergS3BucketRegion(str, Enum): - r"""The region of the S3 bucket. See here for all region codes.""" - UNKNOWN = '' - AF_SOUTH_1 = 'af-south-1' - AP_EAST_1 = 'ap-east-1' - AP_NORTHEAST_1 = 'ap-northeast-1' - AP_NORTHEAST_2 = 'ap-northeast-2' - AP_NORTHEAST_3 = 'ap-northeast-3' - AP_SOUTH_1 = 'ap-south-1' - AP_SOUTH_2 = 'ap-south-2' - AP_SOUTHEAST_1 = 'ap-southeast-1' - AP_SOUTHEAST_2 = 'ap-southeast-2' - AP_SOUTHEAST_3 = 'ap-southeast-3' - AP_SOUTHEAST_4 = 'ap-southeast-4' - CA_CENTRAL_1 = 'ca-central-1' - CA_WEST_1 = 'ca-west-1' - CN_NORTH_1 = 'cn-north-1' - CN_NORTHWEST_1 = 'cn-northwest-1' - EU_CENTRAL_1 = 'eu-central-1' - EU_CENTRAL_2 = 'eu-central-2' - EU_NORTH_1 = 'eu-north-1' - EU_SOUTH_1 = 'eu-south-1' - EU_SOUTH_2 = 'eu-south-2' - EU_WEST_1 = 'eu-west-1' - EU_WEST_2 = 'eu-west-2' - EU_WEST_3 = 'eu-west-3' - IL_CENTRAL_1 = 'il-central-1' - ME_CENTRAL_1 = 'me-central-1' - ME_SOUTH_1 = 'me-south-1' - SA_EAST_1 = 'sa-east-1' - US_EAST_1 = 'us-east-1' - US_EAST_2 = 'us-east-2' - US_GOV_EAST_1 = 'us-gov-east-1' - US_GOV_WEST_1 = 'us-gov-west-1' - US_WEST_1 = 'us-west-1' - US_WEST_2 = 'us-west-2' - - -class StorageType(str, Enum): - S3 = 'S3' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class DestinationIcebergS3: - r"""S3 object storage""" - access_key_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key_id') }}) - r"""The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.""" - s3_warehouse_uri: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_warehouse_uri') }}) - r"""The Warehouse Uri for Iceberg""" - secret_access_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret_access_key') }}) - r"""The corresponding secret to the access key ID. Read more here""" - s3_bucket_region: Optional[DestinationIcebergS3BucketRegion] = dataclasses.field(default=DestinationIcebergS3BucketRegion.UNKNOWN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_region'), 'exclude': lambda f: f is None }}) - r"""The region of the S3 bucket. See here for all region codes.""" - s3_endpoint: Optional[str] = dataclasses.field(default='', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_endpoint'), 'exclude': lambda f: f is None }}) - r"""Your S3 endpoint url. 
Read more here""" - s3_path_style_access: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_path_style_access'), 'exclude': lambda f: f is None }}) - r"""Use path style access""" - storage_type: Optional[StorageType] = dataclasses.field(default=StorageType.S3, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage_type'), 'exclude': lambda f: f is None }}) - - - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class DestinationIceberg: - catalog_config: IcebergCatalogConfig = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('catalog_config') }}) - r"""Catalog config of Iceberg.""" - format_config: FileFormat = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_config') }}) - r"""File format of Iceberg storage.""" - storage_config: StorageConfig = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage_config') }}) - r"""Storage config of Iceberg.""" - DESTINATION_TYPE: Final[Iceberg] = dataclasses.field(default=Iceberg.ICEBERG, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) - - - -IcebergCatalogConfig = Union[HiveCatalogUseApacheHiveMetaStore, HadoopCatalogUseHierarchicalFileSystemsAsSameAsStorageConfig, JdbcCatalogUseRelationalDatabase, RESTCatalog, GlueCatalog] - -StorageConfig = Union[DestinationIcebergS3, ServerManaged] diff --git a/src/airbyte_api/models/destination_mssql.py b/src/airbyte_api/models/destination_mssql.py index 7a2896b9..1e4a2ae0 100644 --- a/src/airbyte_api/models/destination_mssql.py +++ b/src/airbyte_api/models/destination_mssql.py @@ -5,14 +5,54 @@ from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json from enum import Enum -from typing import Final, Optional, Union +from typing import Any, Dict, Final, Optional, Union class Mssql(str, Enum): MSSQL = 'mssql' -class DestinationMssqlSchemasSslMethodSslMethod(str, Enum): +class DestinationMssqlLoadType(str, Enum): + BULK = 'BULK' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class BulkLoad: + r"""Configuration details for using the BULK loading mechanism.""" + azure_blob_storage_account_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_account_name') }}) + r"""The name of the Azure Blob Storage account. See: https://learn.microsoft.com/azure/storage/blobs/storage-blobs-introduction#storage-accounts""" + azure_blob_storage_container_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_container_name') }}) + r"""The name of the Azure Blob Storage container. See: https://learn.microsoft.com/azure/storage/blobs/storage-blobs-introduction#containers""" + bulk_load_data_source: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('bulk_load_data_source') }}) + r"""Specifies the external data source name configured in MSSQL, which references the Azure Blob container. 
See: https://learn.microsoft.com/sql/t-sql/statements/bulk-insert-transact-sql""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + azure_blob_storage_account_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_account_key'), 'exclude': lambda f: f is None }}) + r"""The Azure blob storage account key. Mutually exclusive with a Shared Access Signature""" + bulk_load_validate_values_pre_load: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('bulk_load_validate_values_pre_load'), 'exclude': lambda f: f is None }}) + r"""When enabled, Airbyte will validate all values before loading them into the destination table. This provides stronger data integrity guarantees but may significantly impact performance.""" + load_type: Optional[DestinationMssqlLoadType] = dataclasses.field(default=DestinationMssqlLoadType.BULK, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('load_type'), 'exclude': lambda f: f is None }}) + shared_access_signature: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('shared_access_signature'), 'exclude': lambda f: f is None }}) + r"""A shared access signature (SAS) provides secure delegated access to resources in your storage account. See: https://learn.microsoft.com/azure/storage/common/storage-sas-overview. Mutually exclusive with an account key""" + + + + +class DestinationMssqlSchemasLoadType(str, Enum): + INSERT = 'INSERT' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class InsertLoad: + r"""Configuration details for using the INSERT loading mechanism.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + load_type: Optional[DestinationMssqlSchemasLoadType] = dataclasses.field(default=DestinationMssqlSchemasLoadType.INSERT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('load_type'), 'exclude': lambda f: f is None }}) + + + + +class DestinationMssqlSchemasName(str, Enum): ENCRYPTED_VERIFY_CERTIFICATE = 'encrypted_verify_certificate' @@ -20,14 +60,19 @@ @dataclasses.dataclass class EncryptedVerifyCertificate: r"""Verify and use the certificate provided by the server.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) host_name_in_certificate: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('hostNameInCertificate'), 'exclude': lambda f: f is None }}) r"""Specifies the host name of the server. 
The value of this property must match the subject property of the certificate.""" - SSL_METHOD: Final[Optional[DestinationMssqlSchemasSslMethodSslMethod]] = dataclasses.field(default=DestinationMssqlSchemasSslMethodSslMethod.ENCRYPTED_VERIFY_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method'), 'exclude': lambda f: f is None }}) + name: Optional[DestinationMssqlSchemasName] = dataclasses.field(default=DestinationMssqlSchemasName.ENCRYPTED_VERIFY_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name'), 'exclude': lambda f: f is None }}) + trust_store_name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('trustStoreName'), 'exclude': lambda f: f is None }}) + r"""Specifies the name of the trust store.""" + trust_store_password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('trustStorePassword'), 'exclude': lambda f: f is None }}) + r"""Specifies the password of the trust store.""" -class DestinationMssqlSchemasSslMethod(str, Enum): +class DestinationMssqlName(str, Enum): ENCRYPTED_TRUST_SERVER_CERTIFICATE = 'encrypted_trust_server_certificate' @@ -35,12 +80,13 @@ class DestinationMssqlSchemasSslMethod(str, Enum): @dataclasses.dataclass class EncryptedTrustServerCertificate: r"""Use the certificate provided by the server without verification. (For testing purposes only!)""" - SSL_METHOD: Final[Optional[DestinationMssqlSchemasSslMethod]] = dataclasses.field(default=DestinationMssqlSchemasSslMethod.ENCRYPTED_TRUST_SERVER_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method'), 'exclude': lambda f: f is None }}) + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + name: Optional[DestinationMssqlName] = dataclasses.field(default=DestinationMssqlName.ENCRYPTED_TRUST_SERVER_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name'), 'exclude': lambda f: f is None }}) -class DestinationMssqlSslMethod(str, Enum): +class Name(str, Enum): UNENCRYPTED = 'unencrypted' @@ -48,27 +94,28 @@ class DestinationMssqlSslMethod(str, Enum): @dataclasses.dataclass class Unencrypted: r"""The data transfer will not be encrypted.""" - SSL_METHOD: Final[Optional[DestinationMssqlSslMethod]] = dataclasses.field(default=DestinationMssqlSslMethod.UNENCRYPTED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method'), 'exclude': lambda f: f is None }}) + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + name: Optional[Name] = dataclasses.field(default=Name.UNENCRYPTED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name'), 'exclude': lambda f: f is None }}) class DestinationMssqlSchemasTunnelMethodTunnelMethod(str, Enum): - r"""Connect through a jump server tunnel host using username and password authentication""" SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH' @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationMssqlPasswordAuthentication: + r"""Connect through a jump server tunnel host using username and password authentication""" tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }}) r"""Hostname of the jump 
server host that allows inbound ssh tunnel.""" tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }}) r"""OS-level username for logging into the jump server host""" tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }}) r"""OS-level password for logging into the jump server host""" - TUNNEL_METHOD: Final[DestinationMssqlSchemasTunnelMethodTunnelMethod] = dataclasses.field(default=DestinationMssqlSchemasTunnelMethodTunnelMethod.SSH_PASSWORD_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }}) - r"""Connect through a jump server tunnel host using username and password authentication""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[DestinationMssqlSchemasTunnelMethodTunnelMethod] = dataclasses.field(default=DestinationMssqlSchemasTunnelMethodTunnelMethod.SSH_PASSWORD_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) tunnel_port: Optional[int] = dataclasses.field(default=22, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port'), 'exclude': lambda f: f is None }}) r"""Port on the proxy/jump server that accepts inbound ssh connections.""" @@ -76,21 +123,21 @@ class DestinationMssqlPasswordAuthentication: class DestinationMssqlSchemasTunnelMethod(str, Enum): - r"""Connect through a jump server tunnel host using username and ssh key""" SSH_KEY_AUTH = 'SSH_KEY_AUTH' @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationMssqlSSHKeyAuthentication: + r"""Connect through a jump server tunnel host using username and ssh key""" ssh_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssh_key') }}) r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )""" tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }}) r"""Hostname of the jump server host that allows inbound ssh tunnel.""" tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }}) - r"""OS-level username for logging into the jump server host.""" - TUNNEL_METHOD: Final[DestinationMssqlSchemasTunnelMethod] = dataclasses.field(default=DestinationMssqlSchemasTunnelMethod.SSH_KEY_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }}) - r"""Connect through a jump server tunnel host using username and ssh key""" + r"""OS-level username for logging into the jump server host""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[DestinationMssqlSchemasTunnelMethod] = dataclasses.field(default=DestinationMssqlSchemasTunnelMethod.SSH_KEY_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) tunnel_port: Optional[int] = dataclasses.field(default=22, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port'), 'exclude': lambda f: f is None }}) r"""Port on the proxy/jump server that accepts inbound ssh 
connections.""" @@ -98,15 +145,15 @@ class DestinationMssqlSSHKeyAuthentication: class DestinationMssqlTunnelMethod(str, Enum): - r"""No ssh tunnel needed to connect to database""" NO_TUNNEL = 'NO_TUNNEL' @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationMssqlNoTunnel: - TUNNEL_METHOD: Final[DestinationMssqlTunnelMethod] = dataclasses.field(default=DestinationMssqlTunnelMethod.NO_TUNNEL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }}) r"""No ssh tunnel needed to connect to database""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[DestinationMssqlTunnelMethod] = dataclasses.field(default=DestinationMssqlTunnelMethod.NO_TUNNEL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) @@ -118,26 +165,28 @@ class DestinationMssql: r"""The name of the MSSQL database.""" host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }}) r"""The host name of the MSSQL database.""" - username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + load_type: LoadType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('load_type') }}) + r"""Specifies the type of load mechanism (e.g., BULK, INSERT) and its associated configuration.""" + port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port') }}) + r"""The port of the MSSQL database.""" + ssl_method: SSLMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method') }}) + r"""The encryption method which is used to communicate with the database.""" + user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('user') }}) r"""The username which is used to access the database.""" DESTINATION_TYPE: Final[Mssql] = dataclasses.field(default=Mssql.MSSQL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }}) r"""Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3).""" password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) r"""The password associated with this username.""" - port: Optional[int] = dataclasses.field(default=1433, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port'), 'exclude': lambda f: f is None }}) - r"""The port of the MSSQL database.""" - raw_data_schema: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('raw_data_schema'), 'exclude': lambda f: f is None }}) - r"""The schema to write raw tables into (default: airbyte_internal)""" schema: Optional[str] = dataclasses.field(default='public', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schema'), 'exclude': lambda f: f is None }}) r"""The default schema tables are written to if the source does not specify a namespace. The usual value for this field is \\"public\\".""" - ssl_method: Optional[SSLMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method'), 'exclude': lambda f: f is None }}) - r"""The encryption method which is used to communicate with the database.""" tunnel_method: Optional[DestinationMssqlSSHTunnelMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" +LoadType = Union[InsertLoad, BulkLoad] + SSLMethod = Union[Unencrypted, EncryptedTrustServerCertificate, EncryptedVerifyCertificate] DestinationMssqlSSHTunnelMethod = Union[DestinationMssqlNoTunnel, DestinationMssqlSSHKeyAuthentication, DestinationMssqlPasswordAuthentication] diff --git a/src/airbyte_api/models/destination_mssql_v2.py b/src/airbyte_api/models/destination_mssql_v2.py new file mode 100644 index 00000000..fcf61615 --- /dev/null +++ b/src/airbyte_api/models/destination_mssql_v2.py @@ -0,0 +1,128 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Any, Dict, Final, Optional, Union + + +class MssqlV2(str, Enum): + MSSQL_V2 = 'mssql-v2' + + +class DestinationMssqlV2SchemasLoadType(str, Enum): + BULK = 'BULK' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DestinationMssqlV2BulkLoad: + r"""Configuration details for using the BULK loading mechanism.""" + azure_blob_storage_account_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_account_name') }}) + r"""The name of the Azure Blob Storage account. See: https://learn.microsoft.com/azure/storage/blobs/storage-blobs-introduction#storage-accounts""" + azure_blob_storage_container_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_container_name') }}) + r"""The name of the Azure Blob Storage container. 
See: https://learn.microsoft.com/azure/storage/blobs/storage-blobs-introduction#containers""" + bulk_load_data_source: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('bulk_load_data_source') }}) + r"""Specifies the external data source name configured in MSSQL, which references the Azure Blob container. See: https://learn.microsoft.com/sql/t-sql/statements/bulk-insert-transact-sql""" + shared_access_signature: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('shared_access_signature') }}) + r"""A shared access signature (SAS) provides secure delegated access to resources in your storage account. See: https://learn.microsoft.com/azure/storage/common/storage-sas-overview""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + bulk_load_validate_values_pre_load: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('bulk_load_validate_values_pre_load'), 'exclude': lambda f: f is None }}) + r"""When enabled, Airbyte will validate all values before loading them into the destination table. This provides stronger data integrity guarantees but may significantly impact performance.""" + load_type: Optional[DestinationMssqlV2SchemasLoadType] = dataclasses.field(default=DestinationMssqlV2SchemasLoadType.BULK, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('load_type'), 'exclude': lambda f: f is None }}) + + + + +class DestinationMssqlV2SchemasLoadTypeLoadType(str, Enum): + INSERT = 'INSERT' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DestinationMssqlV2InsertLoad: + r"""Configuration details for using the INSERT loading mechanism.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + load_type: Optional[DestinationMssqlV2SchemasLoadTypeLoadType] = dataclasses.field(default=DestinationMssqlV2SchemasLoadTypeLoadType.INSERT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('load_type'), 'exclude': lambda f: f is None }}) + + + + +class DestinationMssqlV2SchemasSslMethodName(str, Enum): + ENCRYPTED_VERIFY_CERTIFICATE = 'encrypted_verify_certificate' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DestinationMssqlV2EncryptedVerifyCertificate: + r"""Verify and use the certificate provided by the server.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + host_name_in_certificate: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('hostNameInCertificate'), 'exclude': lambda f: f is None }}) + r"""Specifies the host name of the server. 
The value of this property must match the subject property of the certificate.""" + name: Optional[DestinationMssqlV2SchemasSslMethodName] = dataclasses.field(default=DestinationMssqlV2SchemasSslMethodName.ENCRYPTED_VERIFY_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name'), 'exclude': lambda f: f is None }}) + trust_store_name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('trustStoreName'), 'exclude': lambda f: f is None }}) + r"""Specifies the name of the trust store.""" + trust_store_password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('trustStorePassword'), 'exclude': lambda f: f is None }}) + r"""Specifies the password of the trust store.""" + + + + +class DestinationMssqlV2SchemasName(str, Enum): + ENCRYPTED_TRUST_SERVER_CERTIFICATE = 'encrypted_trust_server_certificate' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DestinationMssqlV2EncryptedTrustServerCertificate: + r"""Use the certificate provided by the server without verification. (For testing purposes only!)""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + name: Optional[DestinationMssqlV2SchemasName] = dataclasses.field(default=DestinationMssqlV2SchemasName.ENCRYPTED_TRUST_SERVER_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name'), 'exclude': lambda f: f is None }}) + + + + +class DestinationMssqlV2Name(str, Enum): + UNENCRYPTED = 'unencrypted' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DestinationMssqlV2Unencrypted: + r"""The data transfer will not be encrypted.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + name: Optional[DestinationMssqlV2Name] = dataclasses.field(default=DestinationMssqlV2Name.UNENCRYPTED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name'), 'exclude': lambda f: f is None }}) + + + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DestinationMssqlV2: + database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }}) + r"""The name of the MSSQL database.""" + host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }}) + r"""The host name of the MSSQL database.""" + load_type: DestinationMssqlV2LoadType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('load_type') }}) + r"""Specifies the type of load mechanism (e.g., BULK, INSERT) and its associated configuration.""" + port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port') }}) + r"""The port of the MSSQL database.""" + ssl_method: DestinationMssqlV2SSLMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method') }}) + r"""The encryption method which is used to communicate with the database.""" + user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('user') }}) + r"""The username which is used to access the database.""" + DESTINATION_TYPE: Final[MssqlV2] = dataclasses.field(default=MssqlV2.MSSQL_V2, metadata={'dataclasses_json': { 
'letter_case': utils.get_field_name('destinationType') }}) + jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }}) + r"""Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).""" + password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) + r"""The password associated with this username.""" + schema: Optional[str] = dataclasses.field(default='public', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schema'), 'exclude': lambda f: f is None }}) + r"""The default schema tables are written to if the source does not specify a namespace. The usual value for this field is \\"public\\".""" + + + +DestinationMssqlV2LoadType = Union[DestinationMssqlV2InsertLoad, DestinationMssqlV2BulkLoad] + +DestinationMssqlV2SSLMethod = Union[DestinationMssqlV2Unencrypted, DestinationMssqlV2EncryptedTrustServerCertificate, DestinationMssqlV2EncryptedVerifyCertificate]
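A minimal sketch of wiring the new MSSQL V2 models together, assuming the models package re-exports them (as the destinationconfiguration.py import hunk later in this diff suggests) and that DestinationMssqlV2InsertLoad constructs without arguments; host, credentials, and database names are placeholders:

from airbyte_api.models import (
    DestinationMssqlV2,
    DestinationMssqlV2InsertLoad,
    DestinationMssqlV2Unencrypted,
)

# load_type and ssl_method take members of the two unions defined above.
config = DestinationMssqlV2(
    host='mssql.example.com',
    port=1433,
    database='analytics',
    user='airbyte_user',
    password='...',
    load_type=DestinationMssqlV2InsertLoad(),
    ssl_method=DestinationMssqlV2Unencrypted(),
)

diff --git a/src/airbyte_api/models/destination_oracle.py b/src/airbyte_api/models/destination_oracle.py index 37d5460e..bd97b2f1 100644 --- a/src/airbyte_api/models/destination_oracle.py +++ b/src/airbyte_api/models/destination_oracle.py @@ -130,7 +130,7 @@ class DestinationOracle: username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) r"""The username to access the database. This user must have CREATE USER privileges in the database.""" DESTINATION_TYPE: Final[Oracle] = dataclasses.field(default=Oracle.ORACLE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) - encryption: Optional[Encryption] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption'), 'exclude': lambda f: f is None }}) + encryption: Optional[DestinationOracleEncryption] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption'), 'exclude': lambda f: f is None }}) r"""The encryption method which is used when communicating with the database.""" jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }}) r"""Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'.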
(example: key1=value1&key2=value2&key3=value3).""" @@ -147,6 +147,6 @@ class DestinationOracle: -Encryption = Union[DestinationOracleUnencrypted, NativeNetworkEncryptionNNE, TLSEncryptedVerifyCertificate] +DestinationOracleEncryption = Union[DestinationOracleUnencrypted, NativeNetworkEncryptionNNE, TLSEncryptedVerifyCertificate] DestinationOracleSSHTunnelMethod = Union[DestinationOracleNoTunnel, DestinationOracleSSHKeyAuthentication, DestinationOraclePasswordAuthentication]
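The rename from Encryption to DestinationOracleEncryption is source-breaking for any caller that annotated against the old alias. A sketch of the updated usage, assuming DestinationOracleUnencrypted constructs without arguments and that the alias is importable from the module shown above:

from airbyte_api.models.destination_oracle import (
    DestinationOracleEncryption,
    DestinationOracleUnencrypted,
)

# Previously annotated as Optional[Encryption]; only the alias name changes.
encryption: DestinationOracleEncryption = DestinationOracleUnencrypted()

diff --git a/src/airbyte_api/models/destination_postgres.py b/src/airbyte_api/models/destination_postgres.py index 4406b0ea..cab006a2 100644 --- a/src/airbyte_api/models/destination_postgres.py +++ b/src/airbyte_api/models/destination_postgres.py @@ -33,7 +33,7 @@ class VerifyFull: -class DestinationPostgresSchemasSSLModeSSLModesMode(str, Enum): +class DestinationPostgresSchemasSSLModeSSLModes5Mode(str, Enum): VERIFY_CA = 'verify-ca' @@ -45,12 +45,12 @@ class VerifyCa: r"""CA certificate""" client_key_password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_key_password'), 'exclude': lambda f: f is None }}) r"""Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.""" - MODE: Final[Optional[DestinationPostgresSchemasSSLModeSSLModesMode]] = dataclasses.field(default=DestinationPostgresSchemasSSLModeSSLModesMode.VERIFY_CA, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) + MODE: Final[Optional[DestinationPostgresSchemasSSLModeSSLModes5Mode]] = dataclasses.field(default=DestinationPostgresSchemasSSLModeSSLModes5Mode.VERIFY_CA, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) -class DestinationPostgresSchemasSslModeMode(str, Enum): +class DestinationPostgresSchemasSSLModeSSLModesMode(str, Enum): REQUIRE = 'require' @@ -58,12 +58,12 @@ class DestinationPostgresSchemasSslModeMode(str, Enum): @dataclasses.dataclass class Require: r"""Require SSL mode.""" - MODE: Final[Optional[DestinationPostgresSchemasSslModeMode]] = dataclasses.field(default=DestinationPostgresSchemasSslModeMode.REQUIRE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) + MODE: Final[Optional[DestinationPostgresSchemasSSLModeSSLModesMode]] = dataclasses.field(default=DestinationPostgresSchemasSSLModeSSLModesMode.REQUIRE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) -class DestinationPostgresSchemasMode(str, Enum): +class DestinationPostgresSchemasSslModeMode(str, Enum): PREFER = 'prefer' @@ -71,12 +71,12 @@ class DestinationPostgresSchemasMode(str, Enum): @dataclasses.dataclass class Prefer: r"""Prefer SSL mode.""" - MODE: Final[Optional[DestinationPostgresSchemasMode]] = dataclasses.field(default=DestinationPostgresSchemasMode.PREFER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) + MODE: Final[Optional[DestinationPostgresSchemasSslModeMode]] = dataclasses.field(default=DestinationPostgresSchemasSslModeMode.PREFER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) -class DestinationPostgresMode(str, Enum): +class DestinationPostgresSchemasMode(str, Enum): ALLOW = 'allow' @@ -84,12 +84,12 @@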
@dataclasses.dataclass class Allow: r"""Allow SSL mode.""" - MODE: Final[Optional[DestinationPostgresMode]] = dataclasses.field(default=DestinationPostgresMode.ALLOW, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) + MODE: Final[Optional[DestinationPostgresSchemasMode]] = dataclasses.field(default=DestinationPostgresSchemasMode.ALLOW, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) -class DestinationPostgresSchemasSSLModeSSLModes1Mode(str, Enum): +class DestinationPostgresMode(str, Enum): DISABLE = 'disable' @@ -97,7 +97,7 @@ class DestinationPostgresSchemasSSLModeSSLModes1Mode(str, Enum): @dataclasses.dataclass class Disable: r"""Disable SSL.""" - MODE: Final[Optional[DestinationPostgresSchemasSSLModeSSLModes1Mode]] = dataclasses.field(default=DestinationPostgresSchemasSSLModeSSLModes1Mode.DISABLE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) + MODE: Final[Optional[DestinationPostgresMode]] = dataclasses.field(default=DestinationPostgresMode.DISABLE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) @@ -198,6 +198,8 @@ class DestinationPostgres: """ tunnel_method: Optional[DestinationPostgresSSHTunnelMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" + unconstrained_number: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('unconstrained_number'), 'exclude': lambda f: f is None }}) + r"""Create numeric columns as unconstrained DECIMAL instead of NUMBER(38, 9). This will allow increased precision in numeric values. (this is disabled by default for backwards compatibility, but is recommended to enable)"""
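The enum renames above only shuffle generated names; call sites are unaffected because each SSL-mode class pins its MODE as a Final field with a default. A sketch, assuming the models package re-exports these classes under their bare names:

from airbyte_api.models import Disable, Prefer, Require

# Each variant serializes to its fixed mode string, e.g. {"mode": "require"}.
ssl_mode = Require()
assert ssl_mode.MODE.value == 'require'
assert Prefer().MODE.value == 'prefer'
assert Disable().MODE.value == 'disable'

diff --git a/src/airbyte_api/models/destination_s3.py b/src/airbyte_api/models/destination_s3.py index 500466f4..1d11803b 100644 --- a/src/airbyte_api/models/destination_s3.py +++ b/src/airbyte_api/models/destination_s3.py @@ -5,7 +5,7 @@ from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json from enum import Enum -from typing import Final, Optional, Union +from typing import Any, Dict, Final, Optional, Union class S3(str, Enum): @@ -30,11 +30,12 @@ class DestinationS3SchemasFormatOutputFormatFormatType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationS3ParquetColumnarStorage: + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) block_size_mb: Optional[int] = dataclasses.field(default=128, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('block_size_mb'), 'exclude': lambda f: f is None }}) r"""This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing.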
Default: 128 MB.""" compression_codec: Optional[DestinationS3SchemasCompressionCodec] = dataclasses.field(default=DestinationS3SchemasCompressionCodec.UNCOMPRESSED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_codec'), 'exclude': lambda f: f is None }}) r"""The compression algorithm used to compress data pages.""" - dictionary_encoding: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dictionary_encoding'), 'exclude': lambda f: f is None }}) + dictionary_encoding: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dictionary_encoding'), 'exclude': lambda f: f is None }}) r"""Default: true.""" dictionary_page_size_kb: Optional[int] = dataclasses.field(default=1024, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dictionary_page_size_kb'), 'exclude': lambda f: f is None }}) r"""There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.""" @@ -54,6 +55,7 @@ class DestinationS3SchemasFormatOutputFormat3CompressionCodecCodec(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationS3Snappy: + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) codec: Optional[DestinationS3SchemasFormatOutputFormat3CompressionCodecCodec] = dataclasses.field(default=DestinationS3SchemasFormatOutputFormat3CompressionCodecCodec.SNAPPY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec'), 'exclude': lambda f: f is None }}) @@ -66,11 +68,10 @@ class DestinationS3SchemasFormatOutputFormat3Codec(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationS3Zstandard: + compression_level: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_level') }}) + include_checksum: bool = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('include_checksum') }}) + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) codec: Optional[DestinationS3SchemasFormatOutputFormat3Codec] = dataclasses.field(default=DestinationS3SchemasFormatOutputFormat3Codec.ZSTANDARD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec'), 'exclude': lambda f: f is None }}) - compression_level: Optional[int] = dataclasses.field(default=3, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_level'), 'exclude': lambda f: f is None }}) - r"""Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.""" - include_checksum: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('include_checksum'), 'exclude': lambda f: f is None }}) - r"""If true, include a checksum with each data block.""" @@ -82,9 +83,9 @@ class DestinationS3SchemasFormatOutputFormatCodec(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationS3Xz: + compression_level: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': 
utils.get_field_name('compression_level') }}) + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) codec: Optional[DestinationS3SchemasFormatOutputFormatCodec] = dataclasses.field(default=DestinationS3SchemasFormatOutputFormatCodec.XZ, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec'), 'exclude': lambda f: f is None }}) - compression_level: Optional[int] = dataclasses.field(default=6, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_level'), 'exclude': lambda f: f is None }}) - r"""See here for details.""" @@ -96,6 +97,7 @@ class DestinationS3SchemasFormatCodec(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationS3Bzip2: + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) codec: Optional[DestinationS3SchemasFormatCodec] = dataclasses.field(default=DestinationS3SchemasFormatCodec.BZIP2, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec'), 'exclude': lambda f: f is None }}) @@ -108,9 +110,9 @@ class DestinationS3SchemasCodec(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationS3Deflate: + compression_level: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_level') }}) + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) codec: Optional[DestinationS3SchemasCodec] = dataclasses.field(default=DestinationS3SchemasCodec.DEFLATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec'), 'exclude': lambda f: f is None }}) - compression_level: Optional[int] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_level'), 'exclude': lambda f: f is None }}) - r"""0: no compression & fastest, 9: best compression & slowest.""" @@ -122,6 +124,7 @@ class DestinationS3Codec(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationS3SchemasFormatNoCompression: + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) codec: Optional[DestinationS3Codec] = dataclasses.field(default=DestinationS3Codec.NO_COMPRESSION, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('codec'), 'exclude': lambda f: f is None }}) @@ -136,6 +139,7 @@ class DestinationS3SchemasFormatFormatType(str, Enum): class DestinationS3AvroApacheAvro: compression_codec: DestinationS3CompressionCodec = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_codec') }}) r"""The compression algorithm used to compress data. 
Default to no compression.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) format_type: Optional[DestinationS3SchemasFormatFormatType] = dataclasses.field(default=DestinationS3SchemasFormatFormatType.AVRO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type'), 'exclude': lambda f: f is None }}) @@ -148,6 +152,7 @@ class DestinationS3SchemasFormatOutputFormatCompressionType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationS3SchemasGZIP: + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) compression_type: Optional[DestinationS3SchemasFormatOutputFormatCompressionType] = dataclasses.field(default=DestinationS3SchemasFormatOutputFormatCompressionType.GZIP, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }}) @@ -160,13 +165,13 @@ class DestinationS3SchemasFormatCompressionType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationS3SchemasNoCompression: + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) compression_type: Optional[DestinationS3SchemasFormatCompressionType] = dataclasses.field(default=DestinationS3SchemasFormatCompressionType.NO_COMPRESSION, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }}) class DestinationS3SchemasFlattening(str, Enum): - r"""Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.""" NO_FLATTENING = 'No flattening' ROOT_LEVEL_FLATTENING = 'Root level flattening' @@ -178,10 +183,10 @@ class DestinationS3SchemasFormatType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationS3JSONLinesNewlineDelimitedJSON: + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) compression: Optional[DestinationS3SchemasCompression] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression'), 'exclude': lambda f: f is None }}) r"""Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \\".jsonl.gz\\").""" flattening: Optional[DestinationS3SchemasFlattening] = dataclasses.field(default=DestinationS3SchemasFlattening.NO_FLATTENING, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flattening'), 'exclude': lambda f: f is None }}) - r"""Whether the input json data should be normalized (flattened) in the output JSON Lines. 
Please refer to docs for details.""" format_type: Optional[DestinationS3SchemasFormatType] = dataclasses.field(default=DestinationS3SchemasFormatType.JSONL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type'), 'exclude': lambda f: f is None }}) @@ -194,6 +199,7 @@ class DestinationS3SchemasCompressionType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationS3GZIP: + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) compression_type: Optional[DestinationS3SchemasCompressionType] = dataclasses.field(default=DestinationS3SchemasCompressionType.GZIP, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }}) @@ -206,13 +212,13 @@ class DestinationS3CompressionType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationS3NoCompression: + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) compression_type: Optional[DestinationS3CompressionType] = dataclasses.field(default=DestinationS3CompressionType.NO_COMPRESSION, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }}) class DestinationS3Flattening(str, Enum): - r"""Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.""" NO_FLATTENING = 'No flattening' ROOT_LEVEL_FLATTENING = 'Root level flattening' @@ -224,10 +230,10 @@ class DestinationS3FormatType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class DestinationS3CSVCommaSeparatedValues: + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) compression: Optional[DestinationS3Compression] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression'), 'exclude': lambda f: f is None }}) - r"""Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \\".csv.gz\\").""" + r"""Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \\".jsonl.gz\\").""" flattening: Optional[DestinationS3Flattening] = dataclasses.field(default=DestinationS3Flattening.NO_FLATTENING, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flattening'), 'exclude': lambda f: f is None }}) - r"""Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.""" format_type: Optional[DestinationS3FormatType] = dataclasses.field(default=DestinationS3FormatType.CSV, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type'), 'exclude': lambda f: f is None }}) @@ -284,15 +290,15 @@ class DestinationS3: r"""The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. 
Read more here.""" DESTINATION_TYPE: Final[S3] = dataclasses.field(default=S3.S3, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) file_name_pattern: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('file_name_pattern'), 'exclude': lambda f: f is None }}) - r"""The pattern allows you to set the file-name format for the S3 staging file(s)""" + r"""Pattern to match file names in the bucket directory. Read more here""" role_arn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('role_arn'), 'exclude': lambda f: f is None }}) - r"""The Role ARN""" + r"""The ARN of the AWS role to assume. Only usable in Airbyte Cloud.""" s3_bucket_region: Optional[DestinationS3S3BucketRegion] = dataclasses.field(default=DestinationS3S3BucketRegion.UNKNOWN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_region'), 'exclude': lambda f: f is None }}) r"""The region of the S3 bucket. See here for all region codes.""" - s3_endpoint: Optional[str] = dataclasses.field(default='', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_endpoint'), 'exclude': lambda f: f is None }}) + s3_endpoint: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_endpoint'), 'exclude': lambda f: f is None }}) r"""Your S3 endpoint url. Read more here""" s3_path_format: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_path_format'), 'exclude': lambda f: f is None }}) - r"""Format string on how data will be organized inside the S3 bucket directory. Read more here""" + r"""Format string on how data will be organized inside the bucket directory. Read more here""" secret_access_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret_access_key'), 'exclude': lambda f: f is None }}) r"""The corresponding secret to the access key ID. Read more here"""
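Note the breaking change buried in the Avro codec hunks above: DestinationS3Zstandard, DestinationS3Xz, and DestinationS3Deflate now take compression_level (and, for Zstandard, include_checksum) as required fields instead of defaulting them. A sketch of the updated construction, with values mirroring the old generated defaults:

from airbyte_api.models import DestinationS3Zstandard

# compression_level and include_checksum are now required; 3 and False
# match the defaults the previous generation of this model applied.
codec = DestinationS3Zstandard(compression_level=3, include_checksum=False)

diff --git a/src/airbyte_api/models/destination_s3_data_lake.py b/src/airbyte_api/models/destination_s3_data_lake.py new file mode 100644 index 00000000..826a069c --- /dev/null +++ b/src/airbyte_api/models/destination_s3_data_lake.py @@ -0,0 +1,141 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Any, Dict, Final, Optional, Union + + +class DestinationS3DataLakeSchemasCatalogType(str, Enum): + REST = 'REST' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class RestCatalog: + r"""Configuration details for connecting to a REST catalog.""" + namespace: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('namespace') }}) + r"""The namespace to be used in the Table identifier.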
+ This will ONLY be used if the `Destination Namespace` setting for the connection is set to + `Destination-defined` or `Source-defined` + """ + server_uri: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('server_uri') }}) + r"""The base URL of the Rest server used to connect to the Rest catalog.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + catalog_type: Optional[DestinationS3DataLakeSchemasCatalogType] = dataclasses.field(default=DestinationS3DataLakeSchemasCatalogType.REST, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('catalog_type'), 'exclude': lambda f: f is None }}) + + + + +class DestinationS3DataLakeCatalogType(str, Enum): + GLUE = 'GLUE' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class GlueCatalog: + r"""Configuration details for connecting to an AWS Glue-based Iceberg catalog.""" + database_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database_name') }}) + r"""The Glue database name. This will ONLY be used if the `Destination Namespace` setting for the connection is set to `Destination-defined` or `Source-defined`""" + glue_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('glue_id') }}) + r"""The AWS Account ID associated with the Glue service used by the Iceberg catalog.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + catalog_type: Optional[DestinationS3DataLakeCatalogType] = dataclasses.field(default=DestinationS3DataLakeCatalogType.GLUE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('catalog_type'), 'exclude': lambda f: f is None }}) + role_arn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('role_arn'), 'exclude': lambda f: f is None }}) + r"""The ARN of the AWS role to assume. Only usable in Airbyte Cloud.""" + + + + +class DestinationS3DataLakeSchemasCatalogTypeCatalogType(str, Enum): + NESSIE = 'NESSIE' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class NessieCatalog: + r"""Configuration details for connecting to a Nessie-based Iceberg catalog.""" + namespace: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('namespace') }}) + r"""The Nessie namespace to be used in the Table identifier. 
+ This will ONLY be used if the `Destination Namespace` setting for the connection is set to + `Destination-defined` or `Source-defined` + """ + server_uri: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('server_uri') }}) + r"""The base URL of the Nessie server used to connect to the Nessie catalog.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + access_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token'), 'exclude': lambda f: f is None }}) + r"""Optional token for authentication with the Nessie server.""" + catalog_type: Optional[DestinationS3DataLakeSchemasCatalogTypeCatalogType] = dataclasses.field(default=DestinationS3DataLakeSchemasCatalogTypeCatalogType.NESSIE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('catalog_type'), 'exclude': lambda f: f is None }}) + + + + +class S3DataLake(str, Enum): + S3_DATA_LAKE = 's3-data-lake' + + +class DestinationS3DataLakeS3BucketRegion(str, Enum): + r"""The region of the S3 bucket. See here for all region codes.""" + UNKNOWN = '' + AF_SOUTH_1 = 'af-south-1' + AP_EAST_1 = 'ap-east-1' + AP_NORTHEAST_1 = 'ap-northeast-1' + AP_NORTHEAST_2 = 'ap-northeast-2' + AP_NORTHEAST_3 = 'ap-northeast-3' + AP_SOUTH_1 = 'ap-south-1' + AP_SOUTH_2 = 'ap-south-2' + AP_SOUTHEAST_1 = 'ap-southeast-1' + AP_SOUTHEAST_2 = 'ap-southeast-2' + AP_SOUTHEAST_3 = 'ap-southeast-3' + AP_SOUTHEAST_4 = 'ap-southeast-4' + CA_CENTRAL_1 = 'ca-central-1' + CA_WEST_1 = 'ca-west-1' + CN_NORTH_1 = 'cn-north-1' + CN_NORTHWEST_1 = 'cn-northwest-1' + EU_CENTRAL_1 = 'eu-central-1' + EU_CENTRAL_2 = 'eu-central-2' + EU_NORTH_1 = 'eu-north-1' + EU_SOUTH_1 = 'eu-south-1' + EU_SOUTH_2 = 'eu-south-2' + EU_WEST_1 = 'eu-west-1' + EU_WEST_2 = 'eu-west-2' + EU_WEST_3 = 'eu-west-3' + IL_CENTRAL_1 = 'il-central-1' + ME_CENTRAL_1 = 'me-central-1' + ME_SOUTH_1 = 'me-south-1' + SA_EAST_1 = 'sa-east-1' + US_EAST_1 = 'us-east-1' + US_EAST_2 = 'us-east-2' + US_GOV_EAST_1 = 'us-gov-east-1' + US_GOV_WEST_1 = 'us-gov-west-1' + US_WEST_1 = 'us-west-1' + US_WEST_2 = 'us-west-2' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DestinationS3DataLake: + r"""Defines the configurations required to connect to an Iceberg catalog, including warehouse location, main branch name, and catalog type specifics.""" + catalog_type: CatalogType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('catalog_type') }}) + r"""Specifies the type of Iceberg catalog (e.g., NESSIE, GLUE, REST) and its associated configuration.""" + s3_bucket_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_name') }}) + r"""The name of the S3 bucket that will host the Iceberg data.""" + s3_bucket_region: DestinationS3DataLakeS3BucketRegion = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_region') }}) + r"""The region of the S3 bucket. See here for all region codes.""" + warehouse_location: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('warehouse_location') }}) + r"""The root location of the data warehouse used by the Iceberg catalog. Typically includes a bucket name and path within that bucket. 
For AWS Glue and Nessie, must include the storage protocol (such as \\"s3://\\" for Amazon S3).""" + access_key_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key_id'), 'exclude': lambda f: f is None }}) + r"""The AWS Access Key ID with permissions for S3 and Glue operations.""" + DESTINATION_TYPE: Final[S3DataLake] = dataclasses.field(default=S3DataLake.S3_DATA_LAKE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) + main_branch_name: Optional[str] = dataclasses.field(default='main', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('main_branch_name'), 'exclude': lambda f: f is None }}) + r"""The primary or default branch name in the catalog. Most query engines will use \\"main\\" by default. See Iceberg documentation for more information.""" + s3_endpoint: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_endpoint'), 'exclude': lambda f: f is None }}) + r"""Your S3 endpoint url. Read more here""" + secret_access_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret_access_key'), 'exclude': lambda f: f is None }}) + r"""The AWS Secret Access Key paired with the Access Key ID for AWS authentication.""" + + + +CatalogType = Union[NessieCatalog, GlueCatalog, RestCatalog]
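A sketch of configuring the new S3 Data Lake destination against a Glue catalog; the bucket, Glue database, and account ID are placeholders, and the re-exports are assumed per the destinationconfiguration.py hunk below:

from airbyte_api.models import (
    DestinationS3DataLake,
    DestinationS3DataLakeS3BucketRegion,
    GlueCatalog,
)

config = DestinationS3DataLake(
    catalog_type=GlueCatalog(
        database_name='iceberg_db',
        glue_id='123456789012',
    ),
    s3_bucket_name='my-lake-bucket',
    s3_bucket_region=DestinationS3DataLakeS3BucketRegion.US_EAST_1,
    # Glue requires the storage protocol in the warehouse location:
    warehouse_location='s3://my-lake-bucket/warehouse',
)

diff --git a/src/airbyte_api/models/destination_s3_glue.py b/src/airbyte_api/models/destination_s3_glue.py deleted file mode 100644 index d275b6b5..00000000 --- a/src/airbyte_api/models/destination_s3_glue.py +++ /dev/null @@ -1,136 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from airbyte_api import utils -from dataclasses_json import Undefined, dataclass_json -from enum import Enum -from typing import Final, Optional, Union - - -class S3Glue(str, Enum): - S3_GLUE = 's3-glue' - - -class DestinationS3GlueSchemasCompressionType(str, Enum): - GZIP = 'GZIP' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class DestinationS3GlueGZIP: - compression_type: Optional[DestinationS3GlueSchemasCompressionType] = dataclasses.field(default=DestinationS3GlueSchemasCompressionType.GZIP, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }}) - - - - -class DestinationS3GlueCompressionType(str, Enum): - NO_COMPRESSION = 'No Compression' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class DestinationS3GlueNoCompression: - compression_type: Optional[DestinationS3GlueCompressionType] = dataclasses.field(default=DestinationS3GlueCompressionType.NO_COMPRESSION, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression_type'), 'exclude': lambda f: f is None }}) - - - - -class Flattening(str, Enum): - r"""Whether the input json data should be normalized (flattened) in the output JSON Lines.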
Please refer to docs for details.""" - NO_FLATTENING = 'No flattening' - ROOT_LEVEL_FLATTENING = 'Root level flattening' - - -class DestinationS3GlueFormatType(str, Enum): - JSONL = 'JSONL' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class DestinationS3GlueJSONLinesNewlineDelimitedJSON: - compression: Optional[DestinationS3GlueCompression] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('compression'), 'exclude': lambda f: f is None }}) - r"""Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \\".jsonl.gz\\").""" - flattening: Optional[Flattening] = dataclasses.field(default=Flattening.ROOT_LEVEL_FLATTENING, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flattening'), 'exclude': lambda f: f is None }}) - r"""Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.""" - format_type: Optional[DestinationS3GlueFormatType] = dataclasses.field(default=DestinationS3GlueFormatType.JSONL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format_type'), 'exclude': lambda f: f is None }}) - - - - -class SerializationLibrary(str, Enum): - r"""The library that your query engine will use for reading and writing data in your lake.""" - ORG_OPENX_DATA_JSONSERDE_JSON_SER_DE = 'org.openx.data.jsonserde.JsonSerDe' - ORG_APACHE_HIVE_HCATALOG_DATA_JSON_SER_DE = 'org.apache.hive.hcatalog.data.JsonSerDe' - - -class DestinationS3GlueS3BucketRegion(str, Enum): - r"""The region of the S3 bucket. See here for all region codes.""" - UNKNOWN = '' - AF_SOUTH_1 = 'af-south-1' - AP_EAST_1 = 'ap-east-1' - AP_NORTHEAST_1 = 'ap-northeast-1' - AP_NORTHEAST_2 = 'ap-northeast-2' - AP_NORTHEAST_3 = 'ap-northeast-3' - AP_SOUTH_1 = 'ap-south-1' - AP_SOUTH_2 = 'ap-south-2' - AP_SOUTHEAST_1 = 'ap-southeast-1' - AP_SOUTHEAST_2 = 'ap-southeast-2' - AP_SOUTHEAST_3 = 'ap-southeast-3' - AP_SOUTHEAST_4 = 'ap-southeast-4' - CA_CENTRAL_1 = 'ca-central-1' - CA_WEST_1 = 'ca-west-1' - CN_NORTH_1 = 'cn-north-1' - CN_NORTHWEST_1 = 'cn-northwest-1' - EU_CENTRAL_1 = 'eu-central-1' - EU_CENTRAL_2 = 'eu-central-2' - EU_NORTH_1 = 'eu-north-1' - EU_SOUTH_1 = 'eu-south-1' - EU_SOUTH_2 = 'eu-south-2' - EU_WEST_1 = 'eu-west-1' - EU_WEST_2 = 'eu-west-2' - EU_WEST_3 = 'eu-west-3' - IL_CENTRAL_1 = 'il-central-1' - ME_CENTRAL_1 = 'me-central-1' - ME_SOUTH_1 = 'me-south-1' - SA_EAST_1 = 'sa-east-1' - US_EAST_1 = 'us-east-1' - US_EAST_2 = 'us-east-2' - US_GOV_EAST_1 = 'us-gov-east-1' - US_GOV_WEST_1 = 'us-gov-west-1' - US_WEST_1 = 'us-west-1' - US_WEST_2 = 'us-west-2' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class DestinationS3Glue: - format: DestinationS3GlueOutputFormat = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format') }}) - r"""Format of the data output. See here for more details""" - glue_database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('glue_database') }}) - r"""Name of the glue database for creating the tables, leave blank if no integration""" - s3_bucket_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_name') }}) - r"""The name of the S3 bucket. 
Read more here.""" - s3_bucket_path: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_path') }}) - r"""Directory under the S3 bucket where data will be written. Read more here""" - access_key_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key_id'), 'exclude': lambda f: f is None }}) - r"""The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.""" - DESTINATION_TYPE: Final[S3Glue] = dataclasses.field(default=S3Glue.S3_GLUE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) - file_name_pattern: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('file_name_pattern'), 'exclude': lambda f: f is None }}) - r"""The pattern allows you to set the file-name format for the S3 staging file(s)""" - glue_serialization_library: Optional[SerializationLibrary] = dataclasses.field(default=SerializationLibrary.ORG_OPENX_DATA_JSONSERDE_JSON_SER_DE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('glue_serialization_library'), 'exclude': lambda f: f is None }}) - r"""The library that your query engine will use for reading and writing data in your lake.""" - s3_bucket_region: Optional[DestinationS3GlueS3BucketRegion] = dataclasses.field(default=DestinationS3GlueS3BucketRegion.UNKNOWN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_region'), 'exclude': lambda f: f is None }}) - r"""The region of the S3 bucket. See here for all region codes.""" - s3_endpoint: Optional[str] = dataclasses.field(default='', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_endpoint'), 'exclude': lambda f: f is None }}) - r"""Your S3 endpoint url. Read more here""" - s3_path_format: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_path_format'), 'exclude': lambda f: f is None }}) - r"""Format string on how data will be organized inside the S3 bucket directory. Read more here""" - secret_access_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret_access_key'), 'exclude': lambda f: f is None }}) - r"""The corresponding secret to the access key ID. Read more here""" - - - -DestinationS3GlueCompression = Union[DestinationS3GlueNoCompression, DestinationS3GlueGZIP] - -DestinationS3GlueOutputFormat = Union[DestinationS3GlueJSONLinesNewlineDelimitedJSON] diff --git a/src/airbyte_api/models/destination_salesforce.py b/src/airbyte_api/models/destination_salesforce.py new file mode 100644 index 00000000..971dbab8 --- /dev/null +++ b/src/airbyte_api/models/destination_salesforce.py @@ -0,0 +1,114 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Any, Dict, Final, Optional, Union + + +class AuthType(str, Enum): + CLIENT = 'Client' + + +class DestinationSalesforceSalesforce(str, Enum): + SALESFORCE = 'salesforce' + + +class DestinationSalesforceS3BucketRegion(str, Enum): + r"""The region of the S3 bucket. 
See here for all region codes.""" + UNKNOWN = '' + AF_SOUTH_1 = 'af-south-1' + AP_EAST_1 = 'ap-east-1' + AP_NORTHEAST_1 = 'ap-northeast-1' + AP_NORTHEAST_2 = 'ap-northeast-2' + AP_NORTHEAST_3 = 'ap-northeast-3' + AP_SOUTH_1 = 'ap-south-1' + AP_SOUTH_2 = 'ap-south-2' + AP_SOUTHEAST_1 = 'ap-southeast-1' + AP_SOUTHEAST_2 = 'ap-southeast-2' + AP_SOUTHEAST_3 = 'ap-southeast-3' + AP_SOUTHEAST_4 = 'ap-southeast-4' + CA_CENTRAL_1 = 'ca-central-1' + CA_WEST_1 = 'ca-west-1' + CN_NORTH_1 = 'cn-north-1' + CN_NORTHWEST_1 = 'cn-northwest-1' + EU_CENTRAL_1 = 'eu-central-1' + EU_CENTRAL_2 = 'eu-central-2' + EU_NORTH_1 = 'eu-north-1' + EU_SOUTH_1 = 'eu-south-1' + EU_SOUTH_2 = 'eu-south-2' + EU_WEST_1 = 'eu-west-1' + EU_WEST_2 = 'eu-west-2' + EU_WEST_3 = 'eu-west-3' + IL_CENTRAL_1 = 'il-central-1' + ME_CENTRAL_1 = 'me-central-1' + ME_SOUTH_1 = 'me-south-1' + SA_EAST_1 = 'sa-east-1' + US_EAST_1 = 'us-east-1' + US_EAST_2 = 'us-east-2' + US_GOV_EAST_1 = 'us-gov-east-1' + US_GOV_WEST_1 = 'us-gov-west-1' + US_WEST_1 = 'us-west-1' + US_WEST_2 = 'us-west-2' + + +class DestinationSalesforceSchemasStorageType(str, Enum): + S3 = 'S3' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DestinationSalesforceS3: + bucket_path: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('bucket_path') }}) + r"""All files in the bucket will be prefixed by this.""" + s3_bucket_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_name') }}) + r"""The name of the S3 bucket. Read more here.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + access_key_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key_id'), 'exclude': lambda f: f is None }}) + r"""The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.""" + role_arn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('role_arn'), 'exclude': lambda f: f is None }}) + r"""The ARN of the AWS role to assume. Only usable in Airbyte Cloud.""" + s3_bucket_region: Optional[DestinationSalesforceS3BucketRegion] = dataclasses.field(default=DestinationSalesforceS3BucketRegion.UNKNOWN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_region'), 'exclude': lambda f: f is None }}) + r"""The region of the S3 bucket. See here for all region codes.""" + s3_endpoint: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_endpoint'), 'exclude': lambda f: f is None }}) + r"""Your S3 endpoint url. Read more here""" + secret_access_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret_access_key'), 'exclude': lambda f: f is None }}) + r"""The corresponding secret to the access key ID. 
Read more here""" + storage_type: Optional[DestinationSalesforceSchemasStorageType] = dataclasses.field(default=DestinationSalesforceSchemasStorageType.S3, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage_type'), 'exclude': lambda f: f is None }}) + + + + +class DestinationSalesforceStorageType(str, Enum): + NONE = 'None' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DestinationSalesforceNone: + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + storage_type: Optional[DestinationSalesforceStorageType] = dataclasses.field(default=DestinationSalesforceStorageType.NONE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage_type'), 'exclude': lambda f: f is None }}) + + + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DestinationSalesforce: + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + r"""Enter your Salesforce developer application's Client ID.""" + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + r"""Enter your Salesforce developer application's Client secret.""" + refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }}) + r"""Enter your application's Salesforce Refresh Token used for Airbyte to access your Salesforce account.""" + AUTH_TYPE: Final[AuthType] = dataclasses.field(default=AuthType.CLIENT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }}) + DESTINATION_TYPE: Final[DestinationSalesforceSalesforce] = dataclasses.field(default=DestinationSalesforceSalesforce.SALESFORCE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) + is_sandbox: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('is_sandbox'), 'exclude': lambda f: f is None }}) + r"""Toggle if you're using a Salesforce Sandbox.""" + object_storage_config: Optional[DestinationSalesforceObjectStorageConfiguration] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('object_storage_config'), 'exclude': lambda f: f is None }}) + + + +DestinationSalesforceObjectStorageConfiguration = Union[DestinationSalesforceNone, DestinationSalesforceS3] diff --git a/src/airbyte_api/models/destination_snowflake.py b/src/airbyte_api/models/destination_snowflake.py index 92766da8..cc220519 100644 --- a/src/airbyte_api/models/destination_snowflake.py +++ b/src/airbyte_api/models/destination_snowflake.py @@ -58,7 +58,7 @@ class KeyPairAuthentication: -class DestinationSnowflakeSnowflake(str, Enum): +class Snowflake(str, Enum): SNOWFLAKE = 'snowflake' @@ -78,7 +78,7 @@ class DestinationSnowflake: warehouse: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('warehouse') }}) r"""Enter the name of the warehouse that you want to use as a compute cluster""" credentials: Optional[AuthorizationMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) - DESTINATION_TYPE: Final[DestinationSnowflakeSnowflake] = dataclasses.field(default=DestinationSnowflakeSnowflake.SNOWFLAKE, 
metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) + DESTINATION_TYPE: Final[Snowflake] = dataclasses.field(default=Snowflake.SNOWFLAKE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) disable_type_dedupe: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('disable_type_dedupe'), 'exclude': lambda f: f is None }}) r"""Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions""" jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/destination_surrealdb.py b/src/airbyte_api/models/destination_surrealdb.py new file mode 100644 index 00000000..4d7232a3 --- /dev/null +++ b/src/airbyte_api/models/destination_surrealdb.py @@ -0,0 +1,30 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional + + +class Surrealdb(str, Enum): + SURREALDB = 'surrealdb' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class DestinationSurrealdb: + surrealdb_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('surrealdb_password') }}) + r"""The password to use in SurrealDB.""" + surrealdb_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('surrealdb_url') }}) + r"""The URL of the SurrealDB instance.""" + surrealdb_username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('surrealdb_username') }}) + r"""The username to use in SurrealDB.""" + DESTINATION_TYPE: Final[Surrealdb] = dataclasses.field(default=Surrealdb.SURREALDB, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) + surrealdb_database: Optional[str] = dataclasses.field(default='airbyte', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('surrealdb_database'), 'exclude': lambda f: f is None }}) + r"""The database to use in SurrealDB.""" + surrealdb_namespace: Optional[str] = dataclasses.field(default='airbyte', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('surrealdb_namespace'), 'exclude': lambda f: f is None }}) + r"""The namespace to use in SurrealDB.""" + +
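The SurrealDB model is small enough to exercise whole; a sketch with placeholder connection details (namespace and database fall back to 'airbyte'):

from airbyte_api.models import DestinationSurrealdb

config = DestinationSurrealdb(
    surrealdb_url='wss://surrealdb.example.com',
    surrealdb_username='airbyte',
    surrealdb_password='...',
)

diff --git a/src/airbyte_api/models/destination_teradata.py b/src/airbyte_api/models/destination_teradata.py index 1975fa46..e349ecd9 100644 --- a/src/airbyte_api/models/destination_teradata.py +++ b/src/airbyte_api/models/destination_teradata.py @@ -12,6 +12,38 @@ class Teradata(str, Enum): TERADATA = 'teradata' +class DestinationTeradataSchemasAuthType(str, Enum): + LDAP = 'LDAP' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class Ldap: + password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }}) + r"""Enter the password associated with the username.""" + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + r"""Username to use to access the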
database.""" + AUTH_TYPE: Final[Optional[DestinationTeradataSchemasAuthType]] = dataclasses.field(default=DestinationTeradataSchemasAuthType.LDAP, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) + + + + +class DestinationTeradataAuthType(str, Enum): + TD2 = 'TD2' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class Td2: + password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }}) + r"""Enter the password associated with the username.""" + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + r"""Username to use to access the database.""" + AUTH_TYPE: Final[Optional[DestinationTeradataAuthType]] = dataclasses.field(default=DestinationTeradataAuthType.TD2, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) + + + + class DestinationTeradataSchemasSSLModeSSLModes6Mode(str, Enum): VERIFY_FULL = 'verify-full' @@ -103,17 +135,22 @@ class DestinationTeradataDisable: class DestinationTeradata: host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }}) r"""Hostname of the database.""" - username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) - r"""Username to use to access the database.""" DESTINATION_TYPE: Final[Teradata] = dataclasses.field(default=Teradata.TERADATA, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) + disable_type_dedupe: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('disable_type_dedupe'), 'exclude': lambda f: f is None }}) + r"""Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions""" + drop_cascade: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('drop_cascade'), 'exclude': lambda f: f is None }}) + r"""Drop tables with CASCADE. WARNING! This will delete all data in all dependent objects (views, etc.). Use with caution. This option is intended for usecases which can easily rebuild the dependent objects.""" jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }}) r"""Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).""" - password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) - r"""Password associated with the username.""" + logmech: Optional[AuthorizationMechanism] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('logmech'), 'exclude': lambda f: f is None }}) + query_band: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('query_band'), 'exclude': lambda f: f is None }}) + r"""Defines the custom session query band using name-value pairs. 
For example, 'org=Finance;report=Fin123;'""" + raw_data_schema: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('raw_data_schema'), 'exclude': lambda f: f is None }}) + r"""The database to write raw tables into""" schema: Optional[str] = dataclasses.field(default='airbyte_td', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schema'), 'exclude': lambda f: f is None }}) r"""The default schema tables are written to if the source does not specify a namespace. The usual value for this field is \\"public\\".""" ssl: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl'), 'exclude': lambda f: f is None }}) - r"""Encrypt data using SSL. When activating SSL, please select one of the connection modes.""" + r"""Encrypt data using SSL. When activating SSL, please select one of the SSL modes.""" ssl_mode: Optional[DestinationTeradataSSLModes] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_mode'), 'exclude': lambda f: f is None }}) r"""SSL connection modes. disable - Chose this mode to disable encryption of communication between Airbyte and destination database @@ -127,4 +164,6 @@ class DestinationTeradata: +AuthorizationMechanism = Union[Td2, Ldap] + DestinationTeradataSSLModes = Union[DestinationTeradataDisable, DestinationTeradataAllow, DestinationTeradataPrefer, DestinationTeradataRequire, DestinationTeradataVerifyCa, DestinationTeradataVerifyFull]
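Credentials move from the removed top-level username/password fields into the new logmech union, so existing Teradata configs need a migration along these lines (host and secrets are placeholders):

from airbyte_api.models import DestinationTeradata, Td2

config = DestinationTeradata(
    host='teradata.example.com',
    # Previously: username='airbyte', password='...' at the top level.
    logmech=Td2(username='airbyte', password='...'),
)

diff --git a/src/airbyte_api/models/destination_typesense.py b/src/airbyte_api/models/destination_typesense.py index 5029a71f..33dba435 100644 --- a/src/airbyte_api/models/destination_typesense.py +++ b/src/airbyte_api/models/destination_typesense.py @@ -25,7 +25,7 @@ class DestinationTypesense: path: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('path'), 'exclude': lambda f: f is None }}) r"""Path of the Typesense instance. Default is none""" port: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port'), 'exclude': lambda f: f is None }}) - r"""Port of the Typesense instance. Ex: 8108, 80, 443. Default is 443""" + r"""Port of the Typesense instance. Ex: 8108, 80, 443. Default is 8108""" protocol: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('protocol'), 'exclude': lambda f: f is None }}) r"""Protocol of the Typesense instance. Ex: http or https.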
Default is https""" diff --git a/src/airbyte_api/models/destinationconfiguration.py b/src/airbyte_api/models/destinationconfiguration.py index 80364c75..5098414c 100644 --- a/src/airbyte_api/models/destinationconfiguration.py +++ b/src/airbyte_api/models/destinationconfiguration.py @@ -7,7 +7,9 @@ from .destination_bigquery import DestinationBigquery from .destination_clickhouse import DestinationClickhouse from .destination_convex import DestinationConvex +from .destination_customer_io import DestinationCustomerIo from .destination_databricks import DestinationDatabricks +from .destination_deepset import DestinationDeepset from .destination_dev_null import DestinationDevNull from .destination_duckdb import DestinationDuckdb from .destination_dynamodb import DestinationDynamodb @@ -16,11 +18,12 @@ from .destination_firestore import DestinationFirestore from .destination_gcs import DestinationGcs from .destination_google_sheets import DestinationGoogleSheets -from .destination_iceberg import DestinationIceberg +from .destination_hubspot import DestinationHubspot from .destination_milvus import DestinationMilvus from .destination_mongodb import DestinationMongodb from .destination_motherduck import DestinationMotherduck from .destination_mssql import DestinationMssql +from .destination_mssql_v2 import DestinationMssqlV2 from .destination_mysql import DestinationMysql from .destination_oracle import DestinationOracle from .destination_pgvector import DestinationPgvector @@ -31,10 +34,12 @@ from .destination_redis import DestinationRedis from .destination_redshift import DestinationRedshift from .destination_s3 import DestinationS3 -from .destination_s3_glue import DestinationS3Glue +from .destination_s3_data_lake import DestinationS3DataLake +from .destination_salesforce import DestinationSalesforce from .destination_sftp_json import DestinationSftpJSON from .destination_snowflake import DestinationSnowflake from .destination_snowflake_cortex import DestinationSnowflakeCortex +from .destination_surrealdb import DestinationSurrealdb from .destination_teradata import DestinationTeradata from .destination_timeplus import DestinationTimeplus from .destination_typesense import DestinationTypesense @@ -43,4 +48,4 @@ from .destination_yellowbrick import DestinationYellowbrick from typing import Union -DestinationConfiguration = Union[DestinationGoogleSheets, DestinationAstra, DestinationAwsDatalake, DestinationAzureBlobStorage, DestinationBigquery, DestinationClickhouse, DestinationConvex, DestinationDatabricks, DestinationDevNull, DestinationDuckdb, DestinationDynamodb, DestinationElasticsearch, DestinationFirebolt, DestinationFirestore, DestinationGcs, DestinationIceberg, DestinationMilvus, DestinationMongodb, DestinationMotherduck, DestinationMssql, DestinationMysql, DestinationOracle, DestinationPgvector, DestinationPinecone, DestinationPostgres, DestinationPubsub, DestinationQdrant, DestinationRedis, DestinationRedshift, DestinationS3, DestinationS3Glue, DestinationSftpJSON, DestinationSnowflake, DestinationSnowflakeCortex, DestinationTeradata, DestinationTimeplus, DestinationTypesense, DestinationVectara, DestinationWeaviate, DestinationYellowbrick] +DestinationConfiguration = Union[DestinationGoogleSheets, DestinationAstra, DestinationAwsDatalake, DestinationAzureBlobStorage, DestinationBigquery, DestinationClickhouse, DestinationConvex, DestinationCustomerIo, DestinationDatabricks, DestinationDeepset, DestinationDevNull, DestinationDuckdb, DestinationDynamodb, DestinationElasticsearch, 
DestinationFirebolt, DestinationFirestore, DestinationGcs, DestinationHubspot, DestinationMilvus, DestinationMongodb, DestinationMotherduck, DestinationMssql, DestinationMssqlV2, DestinationMysql, DestinationOracle, DestinationPgvector, DestinationPinecone, DestinationPostgres, DestinationPubsub, DestinationQdrant, DestinationRedis, DestinationRedshift, DestinationS3, DestinationS3DataLake, DestinationSalesforce, DestinationSftpJSON, DestinationSnowflake, DestinationSnowflakeCortex, DestinationSurrealdb, DestinationTeradata, DestinationTimeplus, DestinationTypesense, DestinationVectara, DestinationWeaviate, DestinationYellowbrick] diff --git a/src/airbyte_api/models/destinationcreaterequest.py b/src/airbyte_api/models/destinationcreaterequest.py index 741e3564..a442beef 100644 --- a/src/airbyte_api/models/destinationcreaterequest.py +++ b/src/airbyte_api/models/destinationcreaterequest.py @@ -3,6 +3,7 @@ from __future__ import annotations import dataclasses from .destinationconfiguration import DestinationConfiguration +from .scopedresourcerequirements import ScopedResourceRequirements from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json from typing import Optional @@ -18,5 +19,7 @@ class DestinationCreateRequest: workspace_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workspaceId') }}) definition_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('definitionId'), 'exclude': lambda f: f is None }}) r"""The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.""" + resource_allocation: Optional[ScopedResourceRequirements] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('resourceAllocation'), 'exclude': lambda f: f is None }}) + r"""actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. 
these values will be overridden by configuration at the connection level.""" diff --git a/src/airbyte_api/models/destinationpatchrequest.py b/src/airbyte_api/models/destinationpatchrequest.py index b29fc22e..31538d89 100644 --- a/src/airbyte_api/models/destinationpatchrequest.py +++ b/src/airbyte_api/models/destinationpatchrequest.py @@ -3,6 +3,7 @@ from __future__ import annotations import dataclasses from .destinationconfiguration import DestinationConfiguration +from .scopedresourcerequirements import ScopedResourceRequirements from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json from typing import Optional @@ -14,5 +15,7 @@ class DestinationPatchRequest: configuration: Optional[DestinationConfiguration] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('configuration'), 'exclude': lambda f: f is None }}) r"""The values required to configure the destination.""" name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name'), 'exclude': lambda f: f is None }}) + resource_allocation: Optional[ScopedResourceRequirements] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('resourceAllocation'), 'exclude': lambda f: f is None }}) + r"""actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.""" diff --git a/src/airbyte_api/models/destinationputrequest.py b/src/airbyte_api/models/destinationputrequest.py index 68f04b45..dfc7538c 100644 --- a/src/airbyte_api/models/destinationputrequest.py +++ b/src/airbyte_api/models/destinationputrequest.py @@ -3,8 +3,10 @@ from __future__ import annotations import dataclasses from .destinationconfiguration import DestinationConfiguration +from .scopedresourcerequirements import ScopedResourceRequirements from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json +from typing import Optional @@ -13,5 +15,7 @@ class DestinationPutRequest: configuration: DestinationConfiguration = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('configuration') }}) r"""The values required to configure the destination.""" name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }}) + resource_allocation: Optional[ScopedResourceRequirements] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('resourceAllocation'), 'exclude': lambda f: f is None }}) + r"""actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. 
these values will be overridden by configuration at the connection level.""" diff --git a/src/airbyte_api/models/destinationresponse.py b/src/airbyte_api/models/destinationresponse.py index 57586bf2..42fa9b56 100644 --- a/src/airbyte_api/models/destinationresponse.py +++ b/src/airbyte_api/models/destinationresponse.py @@ -3,8 +3,10 @@ from __future__ import annotations import dataclasses from .destinationconfiguration import DestinationConfiguration +from .scopedresourcerequirements import ScopedResourceRequirements from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json +from typing import Optional @@ -19,5 +21,7 @@ class DestinationResponse: destination_type: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }}) workspace_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workspaceId') }}) + resource_allocation: Optional[ScopedResourceRequirements] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('resourceAllocation'), 'exclude': lambda f: f is None }}) + r"""actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.""" diff --git a/src/airbyte_api/models/emailnotificationconfig.py b/src/airbyte_api/models/emailnotificationconfig.py new file mode 100644 index 00000000..1c8f4b63 --- /dev/null +++ b/src/airbyte_api/models/emailnotificationconfig.py @@ -0,0 +1,16 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
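# Example sketch (not part of the generated diff): populating the new
# `resource_allocation` field with the ScopedResourceRequirements,
# JobTypeResourceLimit and ResourceRequirements models added in this change.
# A default allocation applies to all jobs, with a larger memory limit for
# sync jobs only; assumes the usual `airbyte_api.models` re-exports.
from airbyte_api import models

resource_allocation = models.ScopedResourceRequirements(
    default=models.ResourceRequirements(
        cpu_request='0.5',
        cpu_limit='1',
        memory_request='1Gi',
        memory_limit='2Gi',
    ),
    job_specific=[
        models.JobTypeResourceLimit(
            job_type=models.JobType.SYNC,
            resource_requirements=models.ResourceRequirements(memory_limit='4Gi'),
        ),
    ],
)
# This value can be passed to DestinationCreateRequest, DestinationPatchRequest
# or DestinationPutRequest via their `resource_allocation` parameter.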
DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from airbyte_api import utils -from dataclasses_json import Undefined, dataclass_json -from typing import Optional - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class Intercom: - client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id'), 'exclude': lambda f: f is None }}) - r"""Client Id for your Intercom application.""" - client_secret: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret'), 'exclude': lambda f: f is None }}) - r"""Client Secret for your Intercom application.""" - - diff --git a/src/airbyte_api/models/jobtype.py b/src/airbyte_api/models/jobtype.py new file mode 100644 index 00000000..18e2bf31 --- /dev/null +++ b/src/airbyte_api/models/jobtype.py @@ -0,0 +1,15 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from enum import Enum + + +class JobType(str, Enum): + r"""enum that describes the different types of jobs that the platform runs.""" + GET_SPEC = 'get_spec' + CHECK_CONNECTION = 'check_connection' + DISCOVER_SCHEMA = 'discover_schema' + SYNC = 'sync' + RESET_CONNECTION = 'reset_connection' + CONNECTION_UPDATER = 'connection_updater' + REPLICATE = 'replicate' diff --git a/src/airbyte_api/models/jobtyperesourcelimit.py b/src/airbyte_api/models/jobtyperesourcelimit.py new file mode 100644 index 00000000..455a8bd3 --- /dev/null +++ b/src/airbyte_api/models/jobtyperesourcelimit.py @@ -0,0 +1,20 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from .jobtype import JobType +from .resourcerequirements import ResourceRequirements +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class JobTypeResourceLimit: + r"""sets resource requirements for a specific job type for an actor or actor definition. 
these values override the default, if both are set.""" + job_type: JobType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jobType') }}) + r"""enum that describes the different types of jobs that the platform runs.""" + resource_requirements: ResourceRequirements = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('resourceRequirements') }}) + r"""optional resource requirements to run workers (blank for unbounded allocations)""" + + diff --git a/src/airbyte_api/models/mapperconfiguration.py b/src/airbyte_api/models/mapperconfiguration.py index d0b2844e..8e963b87 100644 --- a/src/airbyte_api/models/mapperconfiguration.py +++ b/src/airbyte_api/models/mapperconfiguration.py @@ -2,10 +2,94 @@ from __future__ import annotations import dataclasses +from .encryptionmapperalgorithm import EncryptionMapperAlgorithm +from .rowfilteringoperation import RowFilteringOperation +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Union +class Mode(str, Enum): + CBC = 'CBC' + CFB = 'CFB' + OFB = 'OFB' + CTR = 'CTR' + GCM = 'GCM' + ECB = 'ECB' + + +class Padding(str, Enum): + NO_PADDING = 'NoPadding' + PKCS5_PADDING = 'PKCS5Padding' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class EncryptionAES: + algorithm: EncryptionMapperAlgorithm = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('algorithm') }}) + field_name_suffix: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('fieldNameSuffix') }}) + key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('key') }}) + mode: Mode = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }}) + padding: Padding = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('padding') }}) + target_field: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('targetField') }}) + + + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class EncryptionRSA: + algorithm: EncryptionMapperAlgorithm = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('algorithm') }}) + field_name_suffix: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('fieldNameSuffix') }}) + public_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('publicKey') }}) + target_field: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('targetField') }}) + + + + +@dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class MapperConfiguration: - r"""The values required to configure the mapper.""" +class RowFiltering: + conditions: RowFilteringOperation = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('conditions') }}) + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class FieldRenaming: + new_field_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('newFieldName') }}) + r"""The new name for the field after renaming.""" + original_field_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('originalFieldName') }}) + r"""The current name of the field to 
rename.""" + + + + +class HashingMethod(str, Enum): + r"""The hashing algorithm to use.""" + MD2 = 'MD2' + MD5 = 'MD5' + SHA_1 = 'SHA-1' + SHA_224 = 'SHA-224' + SHA_256 = 'SHA-256' + SHA_384 = 'SHA-384' + SHA_512 = 'SHA-512' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class Hashing: + field_name_suffix: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('fieldNameSuffix') }}) + r"""The suffix to append to the field name after hashing.""" + method: HashingMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }}) + r"""The hashing algorithm to use.""" + target_field: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('targetField') }}) + r"""The name of the field to be hashed.""" + + + +Encryption = Union[EncryptionRSA, EncryptionAES] + +MapperConfiguration = Union[Hashing, FieldRenaming, RowFiltering, Encryption] diff --git a/src/airbyte_api/models/notificationconfig.py b/src/airbyte_api/models/notificationconfig.py new file mode 100644 index 00000000..9f63e05b --- /dev/null +++ b/src/airbyte_api/models/notificationconfig.py @@ -0,0 +1,21 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from .emailnotificationconfig import EmailNotificationConfig +from .webhooknotificationconfig import WebhookNotificationConfig +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from typing import Optional + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class NotificationConfig: + r"""Configures a notification.""" + email: Optional[EmailNotificationConfig] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('email'), 'exclude': lambda f: f is None }}) + r"""Configures an email notification.""" + webhook: Optional[WebhookNotificationConfig] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('webhook'), 'exclude': lambda f: f is None }}) + r"""Configures a webhook notification.""" + + diff --git a/src/airbyte_api/models/notificationsconfig.py b/src/airbyte_api/models/notificationsconfig.py new file mode 100644 index 00000000..80b573f7 --- /dev/null +++ b/src/airbyte_api/models/notificationsconfig.py @@ -0,0 +1,28 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from .notificationconfig import NotificationConfig +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from typing import Optional + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class NotificationsConfig: + r"""Configures workspace notifications.""" + connection_update: Optional[NotificationConfig] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('connectionUpdate'), 'exclude': lambda f: f is None }}) + r"""Configures a notification.""" + connection_update_action_required: Optional[NotificationConfig] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('connectionUpdateActionRequired'), 'exclude': lambda f: f is None }}) + r"""Configures a notification.""" + failure: Optional[NotificationConfig] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('failure'), 'exclude': lambda f: f is None }}) + r"""Configures a notification.""" + success: Optional[NotificationConfig] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('success'), 'exclude': lambda f: f is None }}) + r"""Configures a notification.""" + sync_disabled: Optional[NotificationConfig] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('syncDisabled'), 'exclude': lambda f: f is None }}) + r"""Configures a notification.""" + sync_disabled_warning: Optional[NotificationConfig] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('syncDisabledWarning'), 'exclude': lambda f: f is None }}) + r"""Configures a notification.""" + + diff --git a/src/airbyte_api/models/oauthactornames.py b/src/airbyte_api/models/oauthactornames.py index b6948000..6d31de78 100644 --- a/src/airbyte_api/models/oauthactornames.py +++ b/src/airbyte_api/models/oauthactornames.py @@ -13,6 +13,7 @@ class OAuthActorNames(str, Enum): BING_ADS = 'bing-ads' DRIFT = 'drift' FACEBOOK_MARKETING = 'facebook-marketing' + FACEBOOK_PAGES = 'facebook-pages' GCS = 'gcs' GITHUB = 'github' GITLAB = 'gitlab' @@ -35,15 +36,14 @@ class OAuthActorNames(str, Enum): PINTEREST = 'pinterest' RD_STATION_MARKETING = 'rd-station-marketing' SALESFORCE = 'salesforce' + SHAREPOINT_ENTERPRISE = 'sharepoint-enterprise' SLACK = 'slack' SMARTSHEETS = 'smartsheets' SNAPCHAT_MARKETING = 'snapchat-marketing' - SNOWFLAKE = 'snowflake' SURVEYMONKEY = 'surveymonkey' TIKTOK_MARKETING = 'tiktok-marketing' TRELLO = 'trello' TYPEFORM = 'typeform' YOUTUBE_ANALYTICS = 'youtube-analytics' - ZENDESK_CHAT = 'zendesk-chat' ZENDESK_SUPPORT = 'zendesk-support' ZENDESK_TALK = 'zendesk-talk' diff --git a/src/airbyte_api/models/oauthcredentialsconfiguration.py b/src/airbyte_api/models/oauthcredentialsconfiguration.py deleted file mode 100644 index 280aa935..00000000 --- a/src/airbyte_api/models/oauthcredentialsconfiguration.py +++ /dev/null @@ -1,48 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" - -from __future__ import annotations -from .airtable import Airtable -from .amazon_ads import AmazonAds -from .amazon_seller_partner import AmazonSellerPartner -from .asana import Asana -from .azure_blob_storage import AzureBlobStorage -from .bing_ads import BingAds -from .drift import Drift -from .facebook_marketing import FacebookMarketing -from .gcs import Gcs -from .github import Github -from .gitlab import Gitlab -from .google_ads import GoogleAds -from .google_analytics_data_api import GoogleAnalyticsDataAPI -from .google_drive import GoogleDrive -from .google_search_console import GoogleSearchConsole -from .google_sheets import GoogleSheets -from .hubspot import Hubspot -from .instagram import Instagram -from .intercom import Intercom -from .lever_hiring import LeverHiring -from .linkedin_ads import LinkedinAds -from .mailchimp import Mailchimp -from .microsoft_onedrive import MicrosoftOnedrive -from .microsoft_sharepoint import MicrosoftSharepoint -from .microsoft_teams import MicrosoftTeams -from .monday import Monday -from .notion import Notion -from .pinterest import Pinterest -from .rd_station_marketing import RdStationMarketing -from .salesforce import Salesforce -from .shopify import Shopify -from .slack import Slack -from .smartsheets import Smartsheets -from .snapchat_marketing import SnapchatMarketing -from .snowflake import Snowflake -from .surveymonkey import Surveymonkey -from .tiktok_marketing import TiktokMarketing -from .typeform import Typeform -from .youtube_analytics import YoutubeAnalytics -from .zendesk_chat import ZendeskChat -from .zendesk_support import ZendeskSupport -from .zendesk_talk import ZendeskTalk -from typing import Any, Union - -OAuthCredentialsConfiguration = Union[Airtable, AmazonAds, AmazonSellerPartner, Asana, AzureBlobStorage, BingAds, Drift, FacebookMarketing, Gcs, Github, Gitlab, GoogleAds, GoogleAnalyticsDataAPI, GoogleDrive, GoogleSearchConsole, GoogleSheets, Hubspot, Instagram, Intercom, LeverHiring, LinkedinAds, Mailchimp, MicrosoftOnedrive, MicrosoftSharepoint, MicrosoftTeams, Monday, Notion, Pinterest, RdStationMarketing, Salesforce, Shopify, Slack, Smartsheets, SnapchatMarketing, Snowflake, Surveymonkey, TiktokMarketing, Any, Typeform, YoutubeAnalytics, ZendeskChat, ZendeskSupport, ZendeskTalk] diff --git a/src/airbyte_api/models/organizationoauthcredentialsrequest.py b/src/airbyte_api/models/organizationoauthcredentialsrequest.py new file mode 100644 index 00000000..e811b726 --- /dev/null +++ b/src/airbyte_api/models/organizationoauthcredentialsrequest.py @@ -0,0 +1,22 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from .actortypeenum import ActorTypeEnum +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from typing import Any + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class OrganizationOAuthCredentialsRequest: + r"""POST body for creating/updating organization level OAuth credentials""" + actor_type: ActorTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('actorType') }}) + r"""Whether you're setting this override for a source or destination""" + configuration: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('configuration') }}) + r"""The values required to configure the source.""" + name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }}) + r"""The name of the source i.e. google-ads""" + + diff --git a/src/airbyte_api/models/resourcerequirements.py b/src/airbyte_api/models/resourcerequirements.py new file mode 100644 index 00000000..2a922068 --- /dev/null +++ b/src/airbyte_api/models/resourcerequirements.py @@ -0,0 +1,21 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from typing import Optional + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class ResourceRequirements: + r"""optional resource requirements to run workers (blank for unbounded allocations)""" + cpu_limit: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cpu_limit'), 'exclude': lambda f: f is None }}) + cpu_request: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cpu_request'), 'exclude': lambda f: f is None }}) + ephemeral_storage_limit: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ephemeral_storage_limit'), 'exclude': lambda f: f is None }}) + ephemeral_storage_request: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ephemeral_storage_request'), 'exclude': lambda f: f is None }}) + memory_limit: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('memory_limit'), 'exclude': lambda f: f is None }}) + memory_request: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('memory_request'), 'exclude': lambda f: f is None }}) + + diff --git a/src/airbyte_api/models/rowfilteringoperation.py b/src/airbyte_api/models/rowfilteringoperation.py new file mode 100644 index 00000000..f378d344 --- /dev/null +++ b/src/airbyte_api/models/rowfilteringoperation.py @@ -0,0 +1,32 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from .rowfilteringoperationtype import RowFilteringOperationType +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from typing import List, Union + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class Not: + conditions: List[RowFilteringOperation] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('conditions') }}) + r"""Conditions to evaluate with the NOT operator.""" + type: RowFilteringOperationType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('type') }}) + + + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class Equal: + comparison_value: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('comparisonValue') }}) + r"""The value to compare the field against.""" + field_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('fieldName') }}) + r"""The name of the field to apply the operation on.""" + type: RowFilteringOperationType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('type') }}) + + + +RowFilteringOperation = Union[Equal, Not] diff --git a/src/airbyte_api/models/geographyenumnodefault.py b/src/airbyte_api/models/rowfilteringoperationtype.py similarity index 60% rename from src/airbyte_api/models/geographyenumnodefault.py rename to src/airbyte_api/models/rowfilteringoperationtype.py index 7ce8a84c..7ee01f98 100644 --- a/src/airbyte_api/models/geographyenumnodefault.py +++ b/src/airbyte_api/models/rowfilteringoperationtype.py @@ -4,7 +4,6 @@ from enum import Enum -class GeographyEnumNoDefault(str, Enum): - AUTO = 'auto' - US = 'us' - EU = 'eu' +class RowFilteringOperationType(str, Enum): + EQUAL = 'EQUAL' + NOT = 'NOT' diff --git a/src/airbyte_api/models/scopedresourcerequirements.py b/src/airbyte_api/models/scopedresourcerequirements.py new file mode 100644 index 00000000..c1c44807 --- /dev/null +++ b/src/airbyte_api/models/scopedresourcerequirements.py @@ -0,0 +1,20 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from .jobtyperesourcelimit import JobTypeResourceLimit +from .resourcerequirements import ResourceRequirements +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from typing import List, Optional + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class ScopedResourceRequirements: + r"""actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overriden by the job type specific configurations. if not set, the platform will use defaults. 
these values will be overriden by configuration at the connection level.""" + default: Optional[ResourceRequirements] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('default'), 'exclude': lambda f: f is None }}) + r"""optional resource requirements to run workers (blank for unbounded allocations)""" + job_specific: Optional[List[JobTypeResourceLimit]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jobSpecific'), 'exclude': lambda f: f is None }}) + + diff --git a/src/airbyte_api/models/zendesk_chat.py b/src/airbyte_api/models/sharepoint_enterprise.py similarity index 65% rename from src/airbyte_api/models/zendesk_chat.py rename to src/airbyte_api/models/sharepoint_enterprise.py index 6853f0e4..2dfbfcf1 100644 --- a/src/airbyte_api/models/zendesk_chat.py +++ b/src/airbyte_api/models/sharepoint_enterprise.py @@ -9,18 +9,18 @@ @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class ZendeskChatCredentials: +class SharepointEnterpriseCredentials: client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id'), 'exclude': lambda f: f is None }}) - r"""The Client ID of your OAuth application""" + r"""Client ID of your Microsoft developer application""" client_secret: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret'), 'exclude': lambda f: f is None }}) - r"""The Client Secret of your OAuth application.""" + r"""Client Secret of your Microsoft developer application""" @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class ZendeskChat: - credentials: Optional[ZendeskChatCredentials] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) +class SharepointEnterprise: + credentials: Optional[SharepointEnterpriseCredentials] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/snowflake.py b/src/airbyte_api/models/snowflake.py deleted file mode 100644 index 1679cbc4..00000000 --- a/src/airbyte_api/models/snowflake.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from airbyte_api import utils -from dataclasses_json import Undefined, dataclass_json -from typing import Optional - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class SnowflakeCredentials: - client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id'), 'exclude': lambda f: f is None }}) - r"""The Client ID of your Snowflake developer application.""" - client_secret: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret'), 'exclude': lambda f: f is None }}) - r"""The Client Secret of your Snowflake developer application.""" - - - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class Snowflake: - credentials: Optional[SnowflakeCredentials] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) - - diff --git a/src/airbyte_api/models/source_100ms.py b/src/airbyte_api/models/source_100ms.py new file mode 100644 index 00000000..1f91d0c4 --- /dev/null +++ b/src/airbyte_api/models/source_100ms.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class OneHundredms(str, Enum): + ONE_HUNDREDMS = '100ms' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class Source100ms: + management_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('management_token') }}) + r"""The management token used for authenticating API requests. You can find or generate this token in your 100ms dashboard under the API section. 
Refer to the documentation at https://www.100ms.live/docs/concepts/v2/concepts/security-and-tokens#management-token-for-rest-api for more details.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[OneHundredms] = dataclasses.field(default=OneHundredms.ONE_HUNDREDMS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_airbyte.py b/src/airbyte_api/models/source_airbyte.py index 1bae453c..9df33679 100644 --- a/src/airbyte_api/models/source_airbyte.py +++ b/src/airbyte_api/models/source_airbyte.py @@ -7,7 +7,7 @@ from dataclasses_json import Undefined, dataclass_json from datetime import datetime from enum import Enum -from typing import Final +from typing import Final, Optional class Airbyte(str, Enum): @@ -20,6 +20,8 @@ class SourceAirbyte: client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + host: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host'), 'exclude': lambda f: f is None }}) + r"""The Host URL of your Self-Managed Deployment (e.g. airbyte.mydomain.com)""" SOURCE_TYPE: Final[Airbyte] = dataclasses.field(default=Airbyte.AIRBYTE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_airtable.py b/src/airbyte_api/models/source_airtable.py index cca6f8fe..5f6eef33 100644 --- a/src/airbyte_api/models/source_airtable.py +++ b/src/airbyte_api/models/source_airtable.py @@ -34,7 +34,7 @@ class SourceAirtableOAuth20: client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) r"""The client ID of the Airtable developer application.""" client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) - r"""The client secret the Airtable developer application.""" + r"""The client secret of the Airtable developer application.""" refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }}) r"""The key to refresh the expired access token.""" access_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_alpaca_broker_api.py b/src/airbyte_api/models/source_alpaca_broker_api.py new file mode 100644 index 00000000..953e92ae --- /dev/null +++ b/src/airbyte_api/models/source_alpaca_broker_api.py @@ -0,0 +1,38 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
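# Example sketch (not part of the generated diff): configuring source-airbyte
# against a self-managed deployment via the new optional `host` field; the
# credentials and hostname are placeholders.
import datetime

from airbyte_api import models

source = models.SourceAirbyte(
    client_id='<client-id>',
    client_secret='<client-secret>',
    start_date=datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc),
    host='airbyte.mydomain.com',  # omit for Airbyte Cloud
)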
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class SourceAlpacaBrokerAPIEnvironment(str, Enum): + r"""The trading environment, either 'live', 'paper' or 'broker-api.sandbox'.""" + API = 'api' + PAPER_API = 'paper-api' + BROKER_API_SANDBOX = 'broker-api.sandbox' + + +class AlpacaBrokerAPI(str, Enum): + ALPACA_BROKER_API = 'alpaca-broker-api' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceAlpacaBrokerAPI: + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + r"""API Key ID for the alpaca market""" + environment: Optional[SourceAlpacaBrokerAPIEnvironment] = dataclasses.field(default=SourceAlpacaBrokerAPIEnvironment.BROKER_API_SANDBOX, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('environment'), 'exclude': lambda f: f is None }}) + r"""The trading environment, either 'live', 'paper' or 'broker-api.sandbox'.""" + limit: Optional[str] = dataclasses.field(default='20', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('limit'), 'exclude': lambda f: f is None }}) + r"""Limit for each response objects""" + password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) + r"""Your Alpaca API Secret Key. You can find this in the Alpaca developer web console under your account settings.""" + SOURCE_TYPE: Final[AlpacaBrokerAPI] = dataclasses.field(default=AlpacaBrokerAPI.ALPACA_BROKER_API, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_alpha_vantage.py b/src/airbyte_api/models/source_alpha_vantage.py new file mode 100644 index 00000000..a60c03b7 --- /dev/null +++ b/src/airbyte_api/models/source_alpha_vantage.py @@ -0,0 +1,45 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional + + +class Interval(str, Enum): + r"""Time-series data point interval. 
Required for intraday endpoints.""" + ONEMIN = '1min' + FIVEMIN = '5min' + FIFTEENMIN = '15min' + THIRTYMIN = '30min' + SIXTYMIN = '60min' + + +class OutputSize(str, Enum): + r"""Whether to return full or compact data (the last 100 data points).""" + COMPACT = 'compact' + FULL = 'full' + + +class AlphaVantage(str, Enum): + ALPHA_VANTAGE = 'alpha-vantage' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceAlphaVantage: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""API Key""" + symbol: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('symbol') }}) + r"""Stock symbol (with exchange code)""" + adjusted: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('adjusted'), 'exclude': lambda f: f is None }}) + r"""Whether to return adjusted data. Only applicable to intraday endpoints.""" + interval: Optional[Interval] = dataclasses.field(default=Interval.ONEMIN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('interval'), 'exclude': lambda f: f is None }}) + r"""Time-series data point interval. Required for intraday endpoints.""" + outputsize: Optional[OutputSize] = dataclasses.field(default=OutputSize.COMPACT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('outputsize'), 'exclude': lambda f: f is None }}) + r"""Whether to return full or compact data (the last 100 data points).""" + SOURCE_TYPE: Final[AlphaVantage] = dataclasses.field(default=AlphaVantage.ALPHA_VANTAGE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_amazon_ads.py b/src/airbyte_api/models/source_amazon_ads.py index 8724933f..fab2204a 100644 --- a/src/airbyte_api/models/source_amazon_ads.py +++ b/src/airbyte_api/models/source_amazon_ads.py @@ -20,29 +20,10 @@ class Region(str, Enum): FE = 'FE' -class ReportRecordTypeEnum(str, Enum): - r"""An enumeration.""" - AD_GROUPS = 'adGroups' - ASINS = 'asins' - ASINS_KEYWORDS = 'asins_keywords' - ASINS_TARGETS = 'asins_targets' - CAMPAIGNS = 'campaigns' - KEYWORDS = 'keywords' - PRODUCT_ADS = 'productAds' - TARGETS = 'targets' - - class SourceAmazonAdsAmazonAds(str, Enum): AMAZON_ADS = 'amazon-ads' -class StateFilterEnum(str, Enum): - r"""An enumeration.""" - ENABLED = 'enabled' - PAUSED = 'paused' - ARCHIVED = 'archived' - - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceAmazonAds: @@ -57,16 +38,14 @@ class SourceAmazonAds: r"""The amount of days to go back in time to get the updated data from Amazon Ads""" marketplace_ids: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('marketplace_ids'), 'exclude': lambda f: f is None }}) r"""Marketplace IDs you want to fetch data for. Note: If Profile IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID.""" + num_workers: Optional[int] = dataclasses.field(default=10, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('num_workers'), 'exclude': lambda f: f is None }}) + r"""The number of worker threads to use for the sync.""" profiles: Optional[List[int]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('profiles'), 'exclude': lambda f: f is None }}) r"""Profile IDs you want to fetch data for. 
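# Example sketch (not part of the generated diff): an Alpha Vantage source
# configured for full-size intraday data at a 5-minute interval; the API key
# and symbol are placeholders.
from airbyte_api import models

source = models.SourceAlphaVantage(
    api_key='<api-key>',
    symbol='AAPL',
    adjusted=False,
    interval=models.Interval.FIVEMIN,   # only used by intraday endpoints
    outputsize=models.OutputSize.FULL,
)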
The Amazon Ads source connector supports only profiles with seller and vendor type, profiles with agency type will be ignored. See docs for more details. Note: If Marketplace IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID.""" region: Optional[Region] = dataclasses.field(default=Region.NA, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region'), 'exclude': lambda f: f is None }}) r"""Region to pull data from (EU/NA/FE). See docs for more details.""" - report_record_types: Optional[List[ReportRecordTypeEnum]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('report_record_types'), 'exclude': lambda f: f is None }}) - r"""Optional configuration which accepts an array of string of record types. Leave blank for default behaviour to pull all report types. Use this config option only if you want to pull specific report type(s). See docs for more details""" SOURCE_TYPE: Final[SourceAmazonAdsAmazonAds] = dataclasses.field(default=SourceAmazonAdsAmazonAds.AMAZON_ADS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) start_date: Optional[date] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'exclude': lambda f: f is None }}) r"""The Start date for collecting reports, should not be more than 60 days in the past. In YYYY-MM-DD format""" - state_filter: Optional[List[StateFilterEnum]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('state_filter'), 'exclude': lambda f: f is None }}) - r"""Reflects the state of the Display, Product, and Brand Campaign streams as enabled, paused, or archived. If you do not populate this field, it will be ignored completely.""" diff --git a/src/airbyte_api/models/source_amazon_seller_partner.py b/src/airbyte_api/models/source_amazon_seller_partner.py index 084e719b..b8c9cbb4 100644 --- a/src/airbyte_api/models/source_amazon_seller_partner.py +++ b/src/airbyte_api/models/source_amazon_seller_partner.py @@ -26,6 +26,23 @@ class AWSEnvironment(str, Enum): SANDBOX = 'SANDBOX' +class FinancialEventsStepSizeInDays(str, Enum): + r"""The time window size (in days) for fetching financial events data in chunks. Options are 1 day, 7 days, 14 days, 30 days, 60 days, 90 days, and 180 days, based on API limitations. + + - **Smaller step sizes (e.g., 1 day)** are better for large data volumes. They fetch smaller chunks per request, reducing the risk of timeouts or overwhelming the API, though more requests may slow syncing and increase the chance of hitting rate limits. + - **Larger step sizes (e.g., 14 days)** are better for smaller data volumes. They fetch more data per request, speeding up syncing and reducing the number of API calls, which minimizes strain on rate limits. + + Select a step size that matches your data volume to optimize syncing speed and API performance. 
+ """ + ONE = '1' + SEVEN = '7' + FOURTEEN = '14' + THIRTY = '30' + SIXTY = '60' + NINETY = '90' + ONE_HUNDRED_AND_EIGHTY = '180' + + class AWSRegion(str, Enum): r"""Select the AWS Region.""" AE = 'AE' @@ -99,14 +116,6 @@ class ReportName(str, Enum): GET_XML_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL = 'GET_XML_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL' GET_XML_BROWSE_TREE_DATA = 'GET_XML_BROWSE_TREE_DATA' GET_VENDOR_REAL_TIME_INVENTORY_REPORT = 'GET_VENDOR_REAL_TIME_INVENTORY_REPORT' - GET_BRAND_ANALYTICS_MARKET_BASKET_REPORT = 'GET_BRAND_ANALYTICS_MARKET_BASKET_REPORT' - GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT = 'GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT' - GET_BRAND_ANALYTICS_REPEAT_PURCHASE_REPORT = 'GET_BRAND_ANALYTICS_REPEAT_PURCHASE_REPORT' - GET_SALES_AND_TRAFFIC_REPORT = 'GET_SALES_AND_TRAFFIC_REPORT' - GET_VENDOR_SALES_REPORT = 'GET_VENDOR_SALES_REPORT' - GET_VENDOR_INVENTORY_REPORT = 'GET_VENDOR_INVENTORY_REPORT' - GET_VENDOR_NET_PURE_PRODUCT_MARGIN_REPORT = 'GET_VENDOR_NET_PURE_PRODUCT_MARGIN_REPORT' - GET_VENDOR_TRAFFIC_REPORT = 'GET_VENDOR_TRAFFIC_REPORT' @@ -135,9 +144,23 @@ class SourceAmazonSellerPartner: r"""The Refresh Token obtained via OAuth flow authorization.""" account_type: Optional[AWSSellerPartnerAccountType] = dataclasses.field(default=AWSSellerPartnerAccountType.SELLER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('account_type'), 'exclude': lambda f: f is None }}) r"""Type of the Account you're going to authorize the Airbyte application by""" + app_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('app_id'), 'exclude': lambda f: f is None }}) + r"""Your Amazon Application ID.""" AUTH_TYPE: Final[Optional[SourceAmazonSellerPartnerAuthType]] = dataclasses.field(default=SourceAmazonSellerPartnerAuthType.OAUTH2_0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) aws_environment: Optional[AWSEnvironment] = dataclasses.field(default=AWSEnvironment.PRODUCTION, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_environment'), 'exclude': lambda f: f is None }}) r"""Select the AWS Environment.""" + financial_events_step: Optional[FinancialEventsStepSizeInDays] = dataclasses.field(default=FinancialEventsStepSizeInDays.ONE_HUNDRED_AND_EIGHTY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('financial_events_step'), 'exclude': lambda f: f is None }}) + r"""The time window size (in days) for fetching financial events data in chunks. Options are 1 day, 7 days, 14 days, 30 days, 60 days, 90 days, and 180 days, based on API limitations. + + - **Smaller step sizes (e.g., 1 day)** are better for large data volumes. They fetch smaller chunks per request, reducing the risk of timeouts or overwhelming the API, though more requests may slow syncing and increase the chance of hitting rate limits. + - **Larger step sizes (e.g., 14 days)** are better for smaller data volumes. They fetch more data per request, speeding up syncing and reducing the number of API calls, which minimizes strain on rate limits. + + Select a step size that matches your data volume to optimize syncing speed and API performance. 
+ """ + max_async_job_count: Optional[int] = dataclasses.field(default=2, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('max_async_job_count'), 'exclude': lambda f: f is None }}) + r"""The maximum number of concurrent asynchronous job requests that can be active at a time.""" + num_workers: Optional[int] = dataclasses.field(default=2, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('num_workers'), 'exclude': lambda f: f is None }}) + r"""The number of workers to use for the connector when syncing concurrently.""" period_in_days: Optional[int] = dataclasses.field(default=90, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('period_in_days'), 'exclude': lambda f: f is None }}) r"""For syncs spanning a large date range, this option is used to request data in a smaller fixed window to improve sync reliability. This time window can be configured granularly by day.""" region: Optional[AWSRegion] = dataclasses.field(default=AWSRegion.US, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_amazon_sqs.py b/src/airbyte_api/models/source_amazon_sqs.py index 7c2fe5eb..da3a4c5e 100644 --- a/src/airbyte_api/models/source_amazon_sqs.py +++ b/src/airbyte_api/models/source_amazon_sqs.py @@ -49,27 +49,33 @@ class AmazonSqs(str, Enum): AMAZON_SQS = 'amazon-sqs' +class TheTargetedActionResourceForTheFetch(str, Enum): + r"""Note - Different targets have different attribute enum requirements, please refer to the actions sections in https://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/Welcome.html""" + GET_QUEUE_ATTRIBUTES = 'GetQueueAttributes' + RECEIVE_MESSAGE = 'ReceiveMessage' + + @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceAmazonSqs: + access_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key') }}) + r"""The Access Key ID of the AWS IAM Role to use for pulling messages""" queue_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('queue_url') }}) r"""URL of the SQS Queue""" - region: SourceAmazonSqsAWSRegion = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region') }}) - r"""AWS Region of the SQS Queue""" - access_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key'), 'exclude': lambda f: f is None }}) - r"""The Access Key ID of the AWS IAM Role to use for pulling messages""" - attributes_to_return: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('attributes_to_return'), 'exclude': lambda f: f is None }}) + secret_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret_key') }}) + r"""The Secret Key of the AWS IAM Role to use for pulling messages""" + attributes_to_return: Optional[str] = dataclasses.field(default='All', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('attributes_to_return'), 'exclude': lambda f: f is None }}) r"""Comma separated list of Message Attribute names to return""" - delete_messages: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delete_messages'), 'exclude': lambda f: f is None }}) - r"""If Enabled, messages will be deleted from the SQS Queue after 
being read. If Disabled, messages are left in the queue and can be read more than once. WARNING: Enabling this option can result in data loss in cases of failure, use with caution, see documentation for more detail.""" - max_batch_size: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('max_batch_size'), 'exclude': lambda f: f is None }}) + max_batch_size: Optional[int] = dataclasses.field(default=10, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('max_batch_size'), 'exclude': lambda f: f is None }}) r"""Max amount of messages to get in one batch (10 max)""" - max_wait_time: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('max_wait_time'), 'exclude': lambda f: f is None }}) + max_wait_time: Optional[int] = dataclasses.field(default=20, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('max_wait_time'), 'exclude': lambda f: f is None }}) r"""Max amount of time in seconds to wait for messages in a single poll (20 max)""" - secret_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret_key'), 'exclude': lambda f: f is None }}) - r"""The Secret Key of the AWS IAM Role to use for pulling messages""" + region: Optional[SourceAmazonSqsAWSRegion] = dataclasses.field(default=SourceAmazonSqsAWSRegion.US_EAST_1, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region'), 'exclude': lambda f: f is None }}) + r"""AWS Region of the SQS Queue""" SOURCE_TYPE: Final[AmazonSqs] = dataclasses.field(default=AmazonSqs.AMAZON_SQS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) - visibility_timeout: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('visibility_timeout'), 'exclude': lambda f: f is None }}) + target: Optional[TheTargetedActionResourceForTheFetch] = dataclasses.field(default=TheTargetedActionResourceForTheFetch.RECEIVE_MESSAGE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('target'), 'exclude': lambda f: f is None }}) + r"""Note - Different targets have different attribute enum requirements, please refer to the actions sections in https://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/Welcome.html""" + visibility_timeout: Optional[int] = dataclasses.field(default=20, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('visibility_timeout'), 'exclude': lambda f: f is None }}) r"""Modify the Visibility Timeout of the individual message from the Queue's default (seconds).""" diff --git a/src/airbyte_api/models/source_amplitude.py b/src/airbyte_api/models/source_amplitude.py index d1414294..b43172a7 100644 --- a/src/airbyte_api/models/source_amplitude.py +++ b/src/airbyte_api/models/source_amplitude.py @@ -30,11 +30,11 @@ class SourceAmplitude: start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) r"""UTC date and time in the format 2021-01-25T00:00:00Z. 
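# Example sketch (not part of the generated diff): the reworked Amazon SQS
# source, where `access_key` and `secret_key` are now required and `target`
# selects the API action; all values are placeholders.
from airbyte_api import models

source = models.SourceAmazonSqs(
    access_key='<aws-access-key-id>',
    queue_url='https://sqs.us-east-1.amazonaws.com/123456789012/my-queue',
    secret_key='<aws-secret-access-key>',
    region=models.SourceAmazonSqsAWSRegion.US_EAST_1,
    target=models.TheTargetedActionResourceForTheFetch.RECEIVE_MESSAGE,
)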
diff --git a/src/airbyte_api/models/source_amplitude.py b/src/airbyte_api/models/source_amplitude.py index d1414294..b43172a7 100644 --- a/src/airbyte_api/models/source_amplitude.py +++ b/src/airbyte_api/models/source_amplitude.py @@ -30,11 +30,11 @@ class SourceAmplitude: start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) r"""UTC date and time in the format 2021-01-25T00:00:00Z. Any data before this date will not be replicated.""" active_users_group_by_country: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('active_users_group_by_country'), 'exclude': lambda f: f is None }}) - r"""According to Considerations the grouping by `Country` is optional, if you're facing issues fetching the stream, or checking the connection please set this to `False` instead.""" + r"""According to Amplitude documentation, grouping by `Country` is optional. If you face issues fetching the stream or checking the connection, please set this field to `False`.""" data_region: Optional[DataRegion] = dataclasses.field(default=DataRegion.STANDARD_SERVER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_region'), 'exclude': lambda f: f is None }}) r"""Amplitude data region server""" request_time_range: Optional[int] = dataclasses.field(default=24, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('request_time_range'), 'exclude': lambda f: f is None }}) - r"""According to Considerations too big time range in request can cause a timeout error. In this case, set shorter time interval in hours.""" + r"""According to Amplitude documentation, too large a time range in the request can cause a timeout error. In this case, please provide a shorter time interval in hours.""" SOURCE_TYPE: Final[Amplitude] = dataclasses.field(default=Amplitude.AMPLITUDE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_apple_search_ads.py b/src/airbyte_api/models/source_apple_search_ads.py index 3fc39d15..14db9f91 100644 --- a/src/airbyte_api/models/source_apple_search_ads.py +++ b/src/airbyte_api/models/source_apple_search_ads.py @@ -12,6 +12,12 @@ class AppleSearchAds(str, Enum): APPLE_SEARCH_ADS = 'apple-search-ads' +class TimeZone(str, Enum): + r"""The timezone for the reporting data. Use 'ORTZ' for Organization Time Zone or 'UTC' for Coordinated Universal Time. Default is UTC.""" + ORTZ = 'ORTZ' + UTC = 'UTC' + + @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceAppleSearchAds: @@ -23,8 +29,16 @@ class SourceAppleSearchAds: r"""The identifier of the organization that owns the campaign. Your Org Id is the same as your account in the Apple Search Ads UI.""" start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }}) r"""Start getting data from that date.""" + backoff_factor: Optional[int] = dataclasses.field(default=5, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('backoff_factor'), 'exclude': lambda f: f is None }}) + r"""This factor determines the delay increase factor between retryable failures. Valid values are integers between 1 and 20.""" end_date: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'exclude': lambda f: f is None }}) r"""Data is retrieved until that date (included)""" + lookback_window: Optional[int] = dataclasses.field(default=30, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_window'), 'exclude': lambda f: f is None }}) + r"""Apple Search Ads uses a 30-day attribution window.
However, you may consider smaller values in order to shorten sync durations, at the cost of missing late data attributions.""" SOURCE_TYPE: Final[AppleSearchAds] = dataclasses.field(default=AppleSearchAds.APPLE_SEARCH_ADS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + timezone: Optional[TimeZone] = dataclasses.field(default=TimeZone.UTC, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('timezone'), 'exclude': lambda f: f is None }}) + r"""The timezone for the reporting data. Use 'ORTZ' for Organization Time Zone or 'UTC' for Coordinated Universal Time. Default is UTC.""" + token_refresh_endpoint: Optional[str] = dataclasses.field(default='https://appleid.apple.com/auth/oauth2/token?grant_type=client_credentials&scope=searchadsorg', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token_refresh_endpoint'), 'exclude': lambda f: f is None }}) + r"""Token Refresh Endpoint. You should override the default value in scenarios where it's required to proxy requests to Apple's token endpoint""" diff --git a/src/airbyte_api/models/source_appsflyer.py b/src/airbyte_api/models/source_appsflyer.py new file mode 100644 index 00000000..8884cd09 --- /dev/null +++ b/src/airbyte_api/models/source_appsflyer.py @@ -0,0 +1,28 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional + + +class Appsflyer(str, Enum): + APPSFLYER = 'appsflyer' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceAppsflyer: + api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }}) + r"""Pull API token for authentication. If you change the account admin, the token changes, and you must update scripts with the new token. Get the API token in the Dashboard.""" + app_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('app_id') }}) + r"""App identifier as found in AppsFlyer.""" + start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }}) + r"""The default value to use if no bookmark exists for an endpoint. Raw Reports historical lookback is limited to 90 days.""" + SOURCE_TYPE: Final[Appsflyer] = dataclasses.field(default=Appsflyer.APPSFLYER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + timezone: Optional[str] = dataclasses.field(default='UTC', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('timezone'), 'exclude': lambda f: f is None }}) + r"""Time zone in which date times are stored. 
The project timezone may be found in the App settings in the AppsFlyer console.""" + + diff --git a/src/airbyte_api/models/source_asana.py b/src/airbyte_api/models/source_asana.py index e4d6d41e..a1f7e72e 100644 --- a/src/airbyte_api/models/source_asana.py +++ b/src/airbyte_api/models/source_asana.py @@ -50,6 +50,8 @@ class SourceAsanaAsana(str, Enum): class SourceAsana: credentials: Optional[AuthenticationMechanism] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) r"""Choose how to authenticate to Asana""" + num_workers: Optional[int] = dataclasses.field(default=10, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('num_workers'), 'exclude': lambda f: f is None }}) + r"""The number of worker threads to use for the sync. The performance upper boundary is based on the limit of your Asana pricing plan. More info about the rate limit tiers can be found on Asana's API docs.""" organization_export_ids: Optional[List[Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('organization_export_ids'), 'exclude': lambda f: f is None }}) r"""Globally unique identifiers for the organization exports""" SOURCE_TYPE: Final[Optional[SourceAsanaAsana]] = dataclasses.field(default=SourceAsanaAsana.ASANA, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_assemblyai.py b/src/airbyte_api/models/source_assemblyai.py new file mode 100644 index 00000000..596035bb --- /dev/null +++ b/src/airbyte_api/models/source_assemblyai.py @@ -0,0 +1,35 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Assemblyai(str, Enum): + ASSEMBLYAI = 'assemblyai' + + +class SubtitleFormat(str, Enum): + r"""The subtitle format for transcript_subtitle stream""" + VTT = 'vtt' + SRT = 'srt' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceAssemblyai: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Your AssemblyAI API key.
You can find it in the AssemblyAI dashboard at https://www.assemblyai.com/app/api-keys.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + request_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('request_id'), 'exclude': lambda f: f is None }}) + r"""The request ID for LeMur responses""" + SOURCE_TYPE: Final[Assemblyai] = dataclasses.field(default=Assemblyai.ASSEMBLYAI, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + subtitle_format: Optional[SubtitleFormat] = dataclasses.field(default=SubtitleFormat.SRT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('subtitle_format'), 'exclude': lambda f: f is None }}) + r"""The subtitle format for transcript_subtitle stream""" + + diff --git a/src/airbyte_api/models/source_auth0.py b/src/airbyte_api/models/source_auth0.py index d23155a2..b76133c2 100644 --- a/src/airbyte_api/models/source_auth0.py +++ b/src/airbyte_api/models/source_auth0.py @@ -8,7 +8,7 @@ from typing import Final, Optional, Union -class SourceAuth0SchemasCredentialsAuthenticationMethod(str, Enum): +class SourceAuth0SchemasAuthenticationMethod(str, Enum): OAUTH2_ACCESS_TOKEN = 'oauth2_access_token' @@ -17,12 +17,12 @@ class SourceAuth0SchemasCredentialsAuthenticationMethod(str, Enum): class OAuth2AccessToken: access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) r"""Also called API Access Token. The access token used to call the Auth0 Management API. It's a JWT that contains specific grant permissions known as scopes.""" - AUTH_TYPE: Final[SourceAuth0SchemasCredentialsAuthenticationMethod] = dataclasses.field(default=SourceAuth0SchemasCredentialsAuthenticationMethod.OAUTH2_ACCESS_TOKEN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }}) + AUTH_TYPE: Final[SourceAuth0SchemasAuthenticationMethod] = dataclasses.field(default=SourceAuth0SchemasAuthenticationMethod.OAUTH2_ACCESS_TOKEN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }}) -class SourceAuth0SchemasAuthenticationMethod(str, Enum): +class SourceAuth0SchemasCredentialsAuthenticationMethod(str, Enum): OAUTH2_CONFIDENTIAL_APPLICATION = 'oauth2_confidential_application' @@ -35,7 +35,7 @@ class OAuth2ConfidentialApplication: r"""Your application's Client ID. You can find this value on the application's settings tab after you log in to the admin portal.""" client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) r"""Your application's Client Secret.
You can find this value on the application's settings tab after you log in to the admin portal.""" - AUTH_TYPE: Final[SourceAuth0SchemasAuthenticationMethod] = dataclasses.field(default=SourceAuth0SchemasAuthenticationMethod.OAUTH2_CONFIDENTIAL_APPLICATION, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }}) + AUTH_TYPE: Final[SourceAuth0SchemasCredentialsAuthenticationMethod] = dataclasses.field(default=SourceAuth0SchemasCredentialsAuthenticationMethod.OAUTH2_CONFIDENTIAL_APPLICATION, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }}) diff --git a/src/airbyte_api/models/source_aviationstack.py b/src/airbyte_api/models/source_aviationstack.py new file mode 100644 index 00000000..89faf3df --- /dev/null +++ b/src/airbyte_api/models/source_aviationstack.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Aviationstack(str, Enum): + AVIATIONSTACK = 'aviationstack' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceAviationstack: + access_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key') }}) + r"""Your unique API key for authenticating with the Aviation API. You can find it in your Aviation account dashboard at https://aviationstack.com/dashboard""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Aviationstack] = dataclasses.field(default=Aviationstack.AVIATIONSTACK, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_awin_advertiser.py b/src/airbyte_api/models/source_awin_advertiser.py new file mode 100644 index 00000000..48f13c2a --- /dev/null +++ b/src/airbyte_api/models/source_awin_advertiser.py @@ -0,0 +1,33 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import date +from enum import Enum +from typing import Final, Optional + + +class AwinAdvertiser(str, Enum): + AWIN_ADVERTISER = 'awin-advertiser' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceAwinAdvertiser: + advertiser_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('advertiserId') }}) + r"""Your Awin Advertiser ID. You can find this in your Awin dashboard or account settings.""" + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Your Awin API key.
Generate this from your Awin account under API Credentials.""" + lookback_days: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_days') }}) + r"""Number of days to look back on each sync to catch any updates to existing records.""" + start_date: date = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(False), 'decoder': utils.datefromisoformat }}) + r"""Start date for data replication in YYYY-MM-DD format""" + SOURCE_TYPE: Final[AwinAdvertiser] = dataclasses.field(default=AwinAdvertiser.AWIN_ADVERTISER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + step_increment: Optional[str] = dataclasses.field(default='P400D', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('step_increment'), 'exclude': lambda f: f is None }}) + r"""The time window size for each API request in ISO8601 duration format. + For the campaign performance stream, Awin API explicitly limits the period between startDate and endDate to 400 days maximum. + """ + + diff --git a/src/airbyte_api/models/source_azure_blob_storage.py b/src/airbyte_api/models/source_azure_blob_storage.py index 9aec3e5b..8953169d 100644 --- a/src/airbyte_api/models/source_azure_blob_storage.py +++ b/src/airbyte_api/models/source_azure_blob_storage.py @@ -10,7 +10,7 @@ from typing import Final, List, Optional, Union -class SourceAzureBlobStorageSchemasAuthType(str, Enum): +class SourceAzureBlobStorageSchemasCredentialsAuthType(str, Enum): STORAGE_ACCOUNT_KEY = 'storage_account_key' @@ -19,7 +19,25 @@ class SourceAzureBlobStorageSchemasAuthType(str, Enum): class AuthenticateViaStorageAccountKey: azure_blob_storage_account_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_account_key') }}) r"""The Azure blob storage account key.""" - AUTH_TYPE: Final[Optional[SourceAzureBlobStorageSchemasAuthType]] = dataclasses.field(default=SourceAzureBlobStorageSchemasAuthType.STORAGE_ACCOUNT_KEY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) + AUTH_TYPE: Final[Optional[SourceAzureBlobStorageSchemasCredentialsAuthType]] = dataclasses.field(default=SourceAzureBlobStorageSchemasCredentialsAuthType.STORAGE_ACCOUNT_KEY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) + + + + +class SourceAzureBlobStorageSchemasAuthType(str, Enum): + CLIENT_CREDENTIALS = 'client_credentials' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class AuthenticateViaClientCredentials: + app_client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('app_client_id') }}) + r"""Client ID of your Microsoft developer application""" + app_client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('app_client_secret') }}) + r"""Client Secret of your Microsoft developer application""" + app_tenant_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('app_tenant_id') }}) + r"""Tenant ID of the Microsoft Azure Application""" + AUTH_TYPE: Final[Optional[SourceAzureBlobStorageSchemasAuthType]] = dataclasses.field(default=SourceAzureBlobStorageSchemasAuthType.CLIENT_CREDENTIALS, metadata={'dataclasses_json': { 'letter_case': 
utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) @@ -75,7 +93,7 @@ class ParsingStrategy(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class DocumentFileTypeFormatExperimental: +class UnstructuredDocumentFormat: r"""Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.""" FILETYPE: Final[Optional[SourceAzureBlobStorageSchemasStreamsFormatFiletype]] = dataclasses.field(default=SourceAzureBlobStorageSchemasStreamsFormatFiletype.UNSTRUCTURED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) processing: Optional[Processing] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('processing'), 'exclude': lambda f: f is None }}) @@ -224,6 +242,8 @@ class FileBasedStreamConfig: r"""The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.""" input_schema: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('input_schema'), 'exclude': lambda f: f is None }}) r"""The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.""" + recent_n_files_to_read_for_schema_discovery: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('recent_n_files_to_read_for_schema_discovery'), 'exclude': lambda f: f is None }}) + r"""The number of recent files which will be used to discover the schema for this stream.""" schemaless: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schemaless'), 'exclude': lambda f: f is None }}) r"""When enabled, syncs will not validate or structure records against the stream's schema.""" validation_policy: Optional[ValidationPolicy] = dataclasses.field(default=ValidationPolicy.EMIT_RECORD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('validation_policy'), 'exclude': lambda f: f is None }}) @@ -254,10 +274,10 @@ class SourceAzureBlobStorage: -SourceAzureBlobStorageAuthentication = Union[AuthenticateViaOauth2, AuthenticateViaStorageAccountKey] +SourceAzureBlobStorageAuthentication = Union[AuthenticateViaOauth2, AuthenticateViaClientCredentials, AuthenticateViaStorageAccountKey] Processing = Union[Local] CSVHeaderDefinition = Union[FromCSV, Autogenerated, UserProvided] -Format = Union[AvroFormat, CSVFormat, JsonlFormat, ParquetFormat, DocumentFileTypeFormatExperimental] +Format = Union[AvroFormat, CSVFormat, JsonlFormat, ParquetFormat, UnstructuredDocumentFormat]
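The Azure Blob Storage source's authentication union gains the client-credentials variant above. A short construction sketch, assuming the usual models re-export; the IDs and secret are placeholders:

    from airbyte_api import models

    # Placeholder Microsoft developer application (Entra ID) values.
    credentials = models.AuthenticateViaClientCredentials(
        app_client_id='00000000-0000-0000-0000-000000000000',
        app_client_secret='example-client-secret',
        app_tenant_id='11111111-1111-1111-1111-111111111111',
    )
    # Any member of SourceAzureBlobStorageAuthentication, including this one,
    # is accepted wherever the source expects its credentials union.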
diff --git a/src/airbyte_api/models/source_babelforce.py b/src/airbyte_api/models/source_babelforce.py new file mode 100644 index 00000000..06ca4563 --- /dev/null +++ b/src/airbyte_api/models/source_babelforce.py @@ -0,0 +1,37 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional + + +class SourceBabelforceRegion(str, Enum): + r"""Babelforce region""" + SERVICES = 'services' + US_EAST = 'us-east' + AP_SOUTHEAST = 'ap-southeast' + + +class Babelforce(str, Enum): + BABELFORCE = 'babelforce' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceBabelforce: + access_key_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_key_id') }}) + r"""The Babelforce access key ID""" + access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) + r"""The Babelforce access token""" + date_created_from: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('date_created_from'), 'exclude': lambda f: f is None }}) + r"""Unix timestamp that the replication from the Babelforce API will start from. For example, 1651363200, which corresponds to 2022-05-01 00:00:00.""" + date_created_to: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('date_created_to'), 'exclude': lambda f: f is None }}) + r"""Unix timestamp that the replication from Babelforce will run up to. For example, 1651363200, which corresponds to 2022-05-01 00:00:00.""" + region: Optional[SourceBabelforceRegion] = dataclasses.field(default=SourceBabelforceRegion.SERVICES, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region'), 'exclude': lambda f: f is None }}) + r"""Babelforce region""" + SOURCE_TYPE: Final[Babelforce] = dataclasses.field(default=Babelforce.BABELFORCE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_bamboo_hr.py b/src/airbyte_api/models/source_bamboo_hr.py index 724ddd90..06883125 100644 --- a/src/airbyte_api/models/source_bamboo_hr.py +++ b/src/airbyte_api/models/source_bamboo_hr.py @@ -25,6 +25,8 @@ class SourceBambooHr: r"""Comma-separated list of fields to include in custom reports.""" custom_reports_include_default_fields: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('custom_reports_include_default_fields'), 'exclude': lambda f: f is None }}) r"""If true, the custom reports endpoint will include the default fields defined here: https://documentation.bamboohr.com/docs/list-of-field-names.""" + employee_fields: Optional[str] = dataclasses.field(default='firstName,lastName', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('employee_fields'), 'exclude': lambda f: f is None }}) + r"""Comma-separated list of fields to include for employees.""" SOURCE_TYPE: Final[BambooHr] = dataclasses.field(default=BambooHr.BAMBOO_HR, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_bluetally.py b/src/airbyte_api/models/source_bluetally.py new file mode 100644 index 00000000..be3ef345 --- /dev/null +++
b/src/airbyte_api/models/source_bluetally.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Bluetally(str, Enum): + BLUETALLY = 'bluetally' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceBluetally: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Your API key to authenticate with the BlueTally API. You can generate it by navigating to your account settings, selecting 'API Keys', and clicking 'Create API Key'.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Bluetally] = dataclasses.field(default=Bluetally.BLUETALLY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_boldsign.py b/src/airbyte_api/models/source_boldsign.py new file mode 100644 index 00000000..24413d1b --- /dev/null +++ b/src/airbyte_api/models/source_boldsign.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Boldsign(str, Enum): + BOLDSIGN = 'boldsign' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceBoldsign: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Your BoldSign API key. You can generate it by navigating to the API menu in the BoldSign app, selecting 'API Key', and clicking 'Generate API Key'. Copy the generated key and paste it here.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Boldsign] = dataclasses.field(default=Boldsign.BOLDSIGN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_breezometer.py b/src/airbyte_api/models/source_breezometer.py new file mode 100644 index 00000000..3dd7b497 --- /dev/null +++ b/src/airbyte_api/models/source_breezometer.py @@ -0,0 +1,34 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional + + +class Breezometer(str, Enum): + BREEZOMETER = 'breezometer' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceBreezometer: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Your API Access Key. 
See here.""" + latitude: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('latitude') }}) + r"""Latitude of the monitored location.""" + longitude: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('longitude') }}) + r"""Longitude of the monitored location.""" + days_to_forecast: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('days_to_forecast'), 'exclude': lambda f: f is None }}) + r"""Number of days to forecast. Minimum 1, maximum 3. Valid for Polen and Weather Forecast streams.""" + historic_hours: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('historic_hours'), 'exclude': lambda f: f is None }}) + r"""Number of hours retireve from Air Quality History stream. Minimum 1, maximum 720.""" + hours_to_forecast: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('hours_to_forecast'), 'exclude': lambda f: f is None }}) + r"""Number of hours to forecast. Minimum 1, maximum 96. Valid for Air Quality Forecast stream.""" + radius: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('radius'), 'exclude': lambda f: f is None }}) + r"""Desired radius from the location provided. Minimum 5, maximum 100. Valid for Wildfires streams.""" + SOURCE_TYPE: Final[Breezometer] = dataclasses.field(default=Breezometer.BREEZOMETER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_calendly.py b/src/airbyte_api/models/source_calendly.py index 48f34a82..c318c921 100644 --- a/src/airbyte_api/models/source_calendly.py +++ b/src/airbyte_api/models/source_calendly.py @@ -7,7 +7,7 @@ from dataclasses_json import Undefined, dataclass_json from datetime import datetime from enum import Enum -from typing import Final +from typing import Final, Optional class Calendly(str, Enum): @@ -20,6 +20,8 @@ class SourceCalendly: api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) r"""Go to Integrations → API & Webhooks to obtain your bearer token. https://calendly.com/integrations/api_webhooks""" start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + lookback_days: Optional[float] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_days'), 'exclude': lambda f: f is None }}) + r"""Number of days to be subtracted from the last cutoff date before starting to sync the `scheduled_events` stream.""" SOURCE_TYPE: Final[Calendly] = dataclasses.field(default=Calendly.CALENDLY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_captain_data.py b/src/airbyte_api/models/source_captain_data.py new file mode 100644 index 00000000..6c582d2a --- /dev/null +++ b/src/airbyte_api/models/source_captain_data.py @@ -0,0 +1,24 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
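The new lookback_days knob re-syncs a trailing window of `scheduled_events` on each run. A minimal construction sketch, assuming the usual models re-export; the token and date are placeholders:

    import dateutil.parser
    from airbyte_api import models

    source = models.SourceCalendly(
        api_key='example-calendly-bearer-token',  # placeholder bearer token
        start_date=dateutil.parser.isoparse('2024-01-01T00:00:00Z'),
        lookback_days=7,  # re-read the last 7 days of scheduled_events each sync
    )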
diff --git a/src/airbyte_api/models/source_captain_data.py b/src/airbyte_api/models/source_captain_data.py new file mode 100644 index 00000000..6c582d2a --- /dev/null +++ b/src/airbyte_api/models/source_captain_data.py @@ -0,0 +1,24 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class CaptainData(str, Enum): + CAPTAIN_DATA = 'captain-data' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceCaptainData: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Your Captain Data project API key.""" + project_uid: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('project_uid') }}) + r"""Your Captain Data project UUID.""" + SOURCE_TYPE: Final[CaptainData] = dataclasses.field(default=CaptainData.CAPTAIN_DATA, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_churnkey.py b/src/airbyte_api/models/source_churnkey.py new file mode 100644 index 00000000..24da603c --- /dev/null +++ b/src/airbyte_api/models/source_churnkey.py @@ -0,0 +1,22 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Churnkey(str, Enum): + CHURNKEY = 'churnkey' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceChurnkey: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + x_ck_app: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('x-ck-app') }}) + SOURCE_TYPE: Final[Churnkey] = dataclasses.field(default=Churnkey.CHURNKEY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_circleci.py b/src/airbyte_api/models/source_circleci.py index e1e1cb5c..15e02d49 100644 --- a/src/airbyte_api/models/source_circleci.py +++ b/src/airbyte_api/models/source_circleci.py @@ -7,7 +7,7 @@ from dataclasses_json import Undefined, dataclass_json from datetime import datetime from enum import Enum -from typing import Final, Optional +from typing import Any, Final, List, Optional class Circleci(str, Enum): @@ -21,16 +21,12 @@ class SourceCircleci: org_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('org_id') }}) r"""The org ID found in `https://app.circleci.com/settings/organization/circleci/xxxxx/overview`""" project_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('project_id') }}) - r"""Project ID found in the project settings""" + r"""Project ID found in the project settings. Visit `https://app.circleci.com/settings/project/circleci/ORG_SLUG/YYYYY`""" start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) - job_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('job_id'), 'exclude': lambda f: f is None }}) - r"""Job ID for fetching information""" job_number: Optional[str] = dataclasses.field(default='2', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('job_number'), 'exclude': lambda f: f is None }}) - r"""Job
Number of the workflow""" + r"""Job number of the workflow for the `jobs` stream. Auto-fetched from the `workflow_jobs` stream if not configured""" SOURCE_TYPE: Final[Circleci] = dataclasses.field(default=Circleci.CIRCLECI, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) - workflow_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workflow_id'), 'exclude': lambda f: f is None }}) - r"""workflow ID of a project pipeline""" - workflow_name: Optional[str] = dataclasses.field(default='build-and-test', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workflow_name'), 'exclude': lambda f: f is None }}) - r"""Workflow name for fetching information""" + workflow_id: Optional[List[Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workflow_id'), 'exclude': lambda f: f is None }}) + r"""Workflow ID of a project pipeline. It can be seen in the URL of a pipeline build, for example `https://app.circleci.com/pipelines/circleci/55555xxxxxx/7yyyyyyyyxxxxx/2/workflows/WORKFLOW_ID`""" diff --git a/src/airbyte_api/models/source_cisco_meraki.py b/src/airbyte_api/models/source_cisco_meraki.py new file mode 100644 index 00000000..8b93af64 --- /dev/null +++ b/src/airbyte_api/models/source_cisco_meraki.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class CiscoMeraki(str, Enum): + CISCO_MERAKI = 'cisco-meraki' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceCiscoMeraki: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Your Meraki API key. Obtain it by logging into your Meraki Dashboard at https://dashboard.meraki.com/, navigating to 'My Profile' via the avatar icon in the top right corner, and generating the API key. Save this key securely as it represents your admin credentials.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[CiscoMeraki] = dataclasses.field(default=CiscoMeraki.CISCO_MERAKI, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_coingecko_coins.py b/src/airbyte_api/models/source_coingecko_coins.py new file mode 100644 index 00000000..b68d7149 --- /dev/null +++ b/src/airbyte_api/models/source_coingecko_coins.py @@ -0,0 +1,47 @@ +"""Code generated by Speakeasy (https://speakeasy.com).
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import date +from enum import Enum +from typing import Final, Optional + + +class Days(str, Enum): + r"""The number of days of data for market chart.""" + ONE = '1' + SEVEN = '7' + FOURTEEN = '14' + THIRTY = '30' + NINETY = '90' + ONE_HUNDRED_AND_EIGHTY = '180' + THREE_HUNDRED_AND_SIXTY_FIVE = '365' + MAX = 'max' + + +class CoingeckoCoins(str, Enum): + COINGECKO_COINS = 'coingecko-coins' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceCoingeckoCoins: + coin_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('coin_id') }}) + r"""CoinGecko coin ID (e.g. bitcoin). Can be retrieved from the + `/coins/list` endpoint. + """ + start_date: date = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(False), 'decoder': utils.datefromisoformat }}) + r"""The start date for the historical data stream in dd-mm-yyyy format.""" + vs_currency: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('vs_currency') }}) + r"""The target currency of market data (e.g. usd, eur, jpy, etc.)""" + api_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key'), 'exclude': lambda f: f is None }}) + r"""API Key (for pro users)""" + days: Optional[Days] = dataclasses.field(default=Days.THIRTY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('days'), 'exclude': lambda f: f is None }}) + r"""The number of days of data for market chart.""" + end_date: Optional[date] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'exclude': lambda f: f is None }}) + r"""The end date for the historical data stream in dd-mm-yyyy format.""" + SOURCE_TYPE: Final[CoingeckoCoins] = dataclasses.field(default=CoingeckoCoins.COINGECKO_COINS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_convertkit.py b/src/airbyte_api/models/source_convertkit.py index 278c7784..ff8b694d 100644 --- a/src/airbyte_api/models/source_convertkit.py +++ b/src/airbyte_api/models/source_convertkit.py @@ -2,10 +2,48 @@ from __future__ import annotations import dataclasses +import dateutil.parser from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json +from datetime import datetime from enum import Enum -from typing import Final +from typing import Final, Optional, Union + + +class SourceConvertkitSchemasAuthType(str, Enum): + API_KEY = 'api_key' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class APIKey: + api_key: Optional[str] = dataclasses.field(default='{{ config.get(\'credentials\',{}).get(\'api_key\') or config.get(\'api_secret\') }}', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key'), 'exclude': lambda f: f is None }}) + r"""Kit/ConvertKit API Key""" + AUTH_TYPE: Final[SourceConvertkitSchemasAuthType] = dataclasses.field(default=SourceConvertkitSchemasAuthType.API_KEY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }}) + + + + +class SourceConvertkitAuthType(str, 
Enum): + OAUTH2_0 = 'oauth2.0' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceConvertkitOAuth20: + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + r"""The client ID of your OAuth application.""" + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + r"""The client secret of your OAuth application.""" + refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }}) + r"""A current, non-expired refresh token generated using the provided client ID and secret.""" + access_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token'), 'exclude': lambda f: f is None }}) + r"""An access token generated using the provided client information and refresh token.""" + AUTH_TYPE: Final[SourceConvertkitAuthType] = dataclasses.field(default=SourceConvertkitAuthType.OAUTH2_0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }}) + expires_at: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('expires_at'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) + r"""The time at which the current access token is set to expire""" + + class Convertkit(str, Enum): @@ -15,8 +53,10 @@ class Convertkit(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceConvertkit: - api_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_secret') }}) - r"""API Secret""" + credentials: AuthenticationType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }}) SOURCE_TYPE: Final[Convertkit] = dataclasses.field(default=Convertkit.CONVERTKIT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + start_date: Optional[datetime] = dataclasses.field(default=dateutil.parser.isoparse('2013-01-01T00:00:00Z'), metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) + +AuthenticationType = Union[SourceConvertkitOAuth20, APIKey]
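This is a breaking change for Kit/ConvertKit configs: the flat api_secret field is replaced by a credentials union. A migration sketch, assuming the usual models re-export; the key is a placeholder:

    from airbyte_api import models

    # Before: models.SourceConvertkit(api_secret='...')
    # After: wrap the secret/key in the APIKey credentials variant.
    source = models.SourceConvertkit(
        credentials=models.APIKey(api_key='example-kit-api-key'),  # placeholder
    )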
diff --git a/src/airbyte_api/models/source_couchbase.py b/src/airbyte_api/models/source_couchbase.py new file mode 100644 index 00000000..a45ccb53 --- /dev/null +++ b/src/airbyte_api/models/source_couchbase.py @@ -0,0 +1,32 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Couchbase(str, Enum): + COUCHBASE = 'couchbase' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceCouchbase: + bucket: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('bucket') }}) + r"""The name of the bucket to sync data from""" + connection_string: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('connection_string') }}) + r"""The connection string for the Couchbase server (e.g., couchbase://localhost or couchbases://example.com)""" + password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }}) + r"""The password to use for authentication""" + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + r"""The username to use for authentication""" + SOURCE_TYPE: Final[Couchbase] = dataclasses.field(default=Couchbase.COUCHBASE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) + r"""The date from which you'd like to replicate data for incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. If not set, all data will be replicated.""" + + diff --git a/src/airbyte_api/models/source_customer_io.py b/src/airbyte_api/models/source_customer_io.py index d033b363..f4623afe 100644 --- a/src/airbyte_api/models/source_customer_io.py +++ b/src/airbyte_api/models/source_customer_io.py @@ -8,7 +8,7 @@ from typing import Final -class CustomerIo(str, Enum): +class SourceCustomerIoCustomerIo(str, Enum): CUSTOMER_IO = 'customer-io' @@ -16,6 +16,6 @@ class CustomerIo(str, Enum): @dataclasses.dataclass class SourceCustomerIo: app_api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('app_api_key') }}) - SOURCE_TYPE: Final[CustomerIo] = dataclasses.field(default=CustomerIo.CUSTOMER_IO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + SOURCE_TYPE: Final[SourceCustomerIoCustomerIo] = dataclasses.field(default=SourceCustomerIoCustomerIo.CUSTOMER_IO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_customerly.py b/src/airbyte_api/models/source_customerly.py new file mode 100644 index 00000000..13b891f2 --- /dev/null +++ b/src/airbyte_api/models/source_customerly.py @@ -0,0 +1,21 @@ +"""Code generated by Speakeasy (https://speakeasy.com).
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Customerly(str, Enum): + CUSTOMERLY = 'customerly' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceCustomerly: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + SOURCE_TYPE: Final[Customerly] = dataclasses.field(default=Customerly.CUSTOMERLY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_datadog.py b/src/airbyte_api/models/source_datadog.py index c116f947..243df844 100644 --- a/src/airbyte_api/models/source_datadog.py +++ b/src/airbyte_api/models/source_datadog.py @@ -49,7 +49,7 @@ class SourceDatadog: r"""Datadog API key""" application_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('application_key') }}) r"""Datadog application key""" - end_date: Optional[str] = dataclasses.field(default='2024-01-01T00:00:00Z', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'exclude': lambda f: f is None }}) + end_date: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'exclude': lambda f: f is None }}) r"""UTC date and time in the format 2017-01-25T00:00:00Z. Data after this date will not be replicated. An empty value will represent the current datetime for each execution. This just applies to Incremental syncs.""" max_records_per_request: Optional[int] = dataclasses.field(default=5000, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('max_records_per_request'), 'exclude': lambda f: f is None }}) r"""Maximum number of records to collect per request.""" diff --git a/src/airbyte_api/models/source_ding_connect.py b/src/airbyte_api/models/source_ding_connect.py new file mode 100644 index 00000000..58a9ae08 --- /dev/null +++ b/src/airbyte_api/models/source_ding_connect.py @@ -0,0 +1,27 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class DingConnect(str, Enum): + DING_CONNECT = 'ding-connect' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceDingConnect: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Your API key for authenticating with the DingConnect API. 
You can generate this key by navigating to the Developer tab in the Account Settings section of your DingConnect account.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + x_correlation_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('X-Correlation-Id'), 'exclude': lambda f: f is None }}) + r"""Optional header to correlate HTTP requests between a client and server.""" + SOURCE_TYPE: Final[DingConnect] = dataclasses.field(default=DingConnect.DING_CONNECT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_docuseal.py b/src/airbyte_api/models/source_docuseal.py new file mode 100644 index 00000000..7f277529 --- /dev/null +++ b/src/airbyte_api/models/source_docuseal.py @@ -0,0 +1,27 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Docuseal(str, Enum): + DOCUSEAL = 'docuseal' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceDocuseal: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Your API key for authenticating with the DocuSeal API. Obtain it from the DocuSeal API Console at https://console.docuseal.com/api.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + limit: Optional[str] = dataclasses.field(default='5', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('limit'), 'exclude': lambda f: f is None }}) + r"""The pagination limit""" + SOURCE_TYPE: Final[Docuseal] = dataclasses.field(default=Docuseal.DOCUSEAL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_dolibarr.py b/src/airbyte_api/models/source_dolibarr.py new file mode 100644 index 00000000..6a8e9fda --- /dev/null +++ b/src/airbyte_api/models/source_dolibarr.py @@ -0,0 +1,26 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Dolibarr(str, Enum): + DOLIBARR = 'dolibarr' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceDolibarr: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + my_dolibarr_domain_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('my_dolibarr_domain_url') }}) + r"""Enter your \\"domain/dolibarr_url\\" without https://. Example: mydomain.com/dolibarr""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Dolibarr] = dataclasses.field(default=Dolibarr.DOLIBARR, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_dwolla.py b/src/airbyte_api/models/source_dwolla.py new file mode 100644 index 00000000..07498c78 --- /dev/null +++ b/src/airbyte_api/models/source_dwolla.py @@ -0,0 +1,33 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class SourceDwollaEnvironment(str, Enum): + r"""The environment for the Dwolla API, either 'api-sandbox' or 'api'.""" + API = 'api' + API_SANDBOX = 'api-sandbox' + + +class Dwolla(str, Enum): + DWOLLA = 'dwolla' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceDwolla: + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + environment: Optional[SourceDwollaEnvironment] = dataclasses.field(default=SourceDwollaEnvironment.API, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('environment'), 'exclude': lambda f: f is None }}) + r"""The environment for the Dwolla API, either 'api-sandbox' or 'api'.""" + SOURCE_TYPE: Final[Dwolla] = dataclasses.field(default=Dwolla.DWOLLA, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_ebay_finance.py b/src/airbyte_api/models/source_ebay_finance.py new file mode 100644 index 00000000..117a3244 --- /dev/null +++ b/src/airbyte_api/models/source_ebay_finance.py @@ -0,0 +1,43 @@ +"""Code generated by Speakeasy (https://speakeasy.com).
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class APIHost(str, Enum): + r"""https://apiz.sandbox.ebay.com for sandbox & https://apiz.ebay.com for production""" + HTTPS_APIZ_SANDBOX_EBAY_COM = 'https://apiz.sandbox.ebay.com' + HTTPS_APIZ_EBAY_COM = 'https://apiz.ebay.com' + + +class EbayFinance(str, Enum): + EBAY_FINANCE = 'ebay-finance' + + +class RefreshTokenEndpoint(str, Enum): + HTTPS_API_SANDBOX_EBAY_COM_IDENTITY_V1_OAUTH2_TOKEN = 'https://api.sandbox.ebay.com/identity/v1/oauth2/token' + HTTPS_API_EBAY_COM_IDENTITY_V1_OAUTH2_TOKEN = 'https://api.ebay.com/identity/v1/oauth2/token' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceEbayFinance: + redirect_uri: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('redirect_uri') }}) + refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + r"""Ebay Developer Client ID""" + api_host: Optional[APIHost] = dataclasses.field(default=APIHost.HTTPS_APIZ_EBAY_COM, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_host'), 'exclude': lambda f: f is None }}) + r"""https://apiz.sandbox.ebay.com for sandbox & https://apiz.ebay.com for production""" + password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) + r"""Ebay Client Secret""" + SOURCE_TYPE: Final[EbayFinance] = dataclasses.field(default=EbayFinance.EBAY_FINANCE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + token_refresh_endpoint: Optional[RefreshTokenEndpoint] = dataclasses.field(default=RefreshTokenEndpoint.HTTPS_API_EBAY_COM_IDENTITY_V1_OAUTH2_TOKEN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token_refresh_endpoint'), 'exclude': lambda f: f is None }}) + +
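When targeting the eBay sandbox, the api_host and token_refresh_endpoint enums above should be switched together. A hedged configuration sketch, assuming the usual models re-export; tokens and IDs are placeholders:

    import dateutil.parser
    from airbyte_api import models

    # Sandbox setup: both host and token endpoint point at the sandbox environment.
    source = models.SourceEbayFinance(
        redirect_uri='example-RuName',                # placeholder eBay RuName
        refresh_token='v^1.1#example-refresh-token',  # placeholder OAuth refresh token
        start_date=dateutil.parser.isoparse('2024-01-01T00:00:00Z'),
        username='example-client-id',                 # eBay developer client ID
        password='example-client-secret',             # eBay client secret
        api_host=models.APIHost.HTTPS_APIZ_SANDBOX_EBAY_COM,
        token_refresh_endpoint=models.RefreshTokenEndpoint.HTTPS_API_SANDBOX_EBAY_COM_IDENTITY_V1_OAUTH2_TOKEN,
    )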
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class SourceEbayFulfillmentAPIHost(str, Enum): + HTTPS_API_EBAY_COM = 'https://api.ebay.com' + HTTPS_API_SANDBOX_EBAY_COM = 'https://api.sandbox.ebay.com' + + +class SourceEbayFulfillmentRefreshTokenEndpoint(str, Enum): + HTTPS_API_EBAY_COM_IDENTITY_V1_OAUTH2_TOKEN = 'https://api.ebay.com/identity/v1/oauth2/token' + HTTPS_API_SANDBOX_EBAY_COM_IDENTITY_V1_OAUTH2_TOKEN = 'https://api.sandbox.ebay.com/identity/v1/oauth2/token' + + +class EbayFulfillment(str, Enum): + EBAY_FULFILLMENT = 'ebay-fulfillment' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceEbayFulfillment: + password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }}) + redirect_uri: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('redirect_uri') }}) + refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + api_host: Optional[SourceEbayFulfillmentAPIHost] = dataclasses.field(default=SourceEbayFulfillmentAPIHost.HTTPS_API_EBAY_COM, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_host'), 'exclude': lambda f: f is None }}) + refresh_token_endpoint: Optional[SourceEbayFulfillmentRefreshTokenEndpoint] = dataclasses.field(default=SourceEbayFulfillmentRefreshTokenEndpoint.HTTPS_API_EBAY_COM_IDENTITY_V1_OAUTH2_TOKEN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token_endpoint'), 'exclude': lambda f: f is None }}) + SOURCE_TYPE: Final[EbayFulfillment] = dataclasses.field(default=EbayFulfillment.EBAY_FULFILLMENT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +
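# ---------------------------------------------------------------------------
# Editor's note (illustrative sketch, not part of the generated diff): the
# eBay Fulfillment source defaults to the production host and token endpoint;
# pick the sandbox enum members for test accounts. All values below are
# placeholders, and the import path is assumed to be airbyte_api.models.
from datetime import datetime, timezone

from airbyte_api.models import SourceEbayFulfillment, SourceEbayFulfillmentAPIHost

config = SourceEbayFulfillment(
    password='my-client-secret',      # eBay client secret (placeholder)
    redirect_uri='my-redirect-uri',   # placeholder
    refresh_token='v^1.1#...',        # placeholder, elided token
    start_date=datetime(2024, 1, 1, tzinfo=timezone.utc),
    username='my-client-id',          # eBay client ID (placeholder)
    api_host=SourceEbayFulfillmentAPIHost.HTTPS_API_SANDBOX_EBAY_COM,
)
# ---------------------------------------------------------------------------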
diff --git a/src/airbyte_api/models/source_elasticsearch.py b/src/airbyte_api/models/source_elasticsearch.py new file mode 100644 index 00000000..03998ec1 --- /dev/null +++ b/src/airbyte_api/models/source_elasticsearch.py @@ -0,0 +1,76 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Any, Dict, Final, Optional, Union + + +class SourceElasticsearchSchemasAuthenticationMethodMethod(str, Enum): + BASIC = 'basic' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceElasticsearchUsernamePassword: + r"""Basic auth header with a username and password""" + password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }}) + r"""Basic auth password to access a secure Elasticsearch server""" + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + r"""Basic auth username to access a secure Elasticsearch server""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + METHOD: Final[SourceElasticsearchSchemasAuthenticationMethodMethod] = dataclasses.field(default=SourceElasticsearchSchemasAuthenticationMethodMethod.BASIC, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }}) + + + + +class SourceElasticsearchSchemasMethod(str, Enum): + SECRET = 'secret' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceElasticsearchAPIKeySecret: + r"""Use an API key and secret combination to authenticate""" + api_key_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('apiKeyId') }}) + r"""The Key ID used when accessing an enterprise Elasticsearch instance.""" + api_key_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('apiKeySecret') }}) + r"""The secret associated with the API Key ID.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + METHOD: Final[SourceElasticsearchSchemasMethod] = dataclasses.field(default=SourceElasticsearchSchemasMethod.SECRET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }}) + + + + +class SourceElasticsearchMethod(str, Enum): + NONE = 'none' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceElasticsearchNone: + r"""No authentication will be used""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + METHOD: Final[SourceElasticsearchMethod] = dataclasses.field(default=SourceElasticsearchMethod.NONE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }}) + + + + +class SourceElasticsearchElasticsearch(str, Enum): + ELASTICSEARCH = 'elasticsearch' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceElasticsearch: + endpoint: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('endpoint') }}) + r"""The full URL of the Elasticsearch server""" + authentication_method: Optional[SourceElasticsearchAuthenticationMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('authenticationMethod'), 'exclude': lambda f: f is None }}) + r"""The type of authentication to be used""" + SOURCE_TYPE: Final[SourceElasticsearchElasticsearch] = 
dataclasses.field(default=SourceElasticsearchElasticsearch.ELASTICSEARCH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + + +SourceElasticsearchAuthenticationMethod = Union[SourceElasticsearchNone, SourceElasticsearchAPIKeySecret, SourceElasticsearchUsernamePassword]
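# ---------------------------------------------------------------------------
# Editor's note (illustrative sketch, not part of the generated diff): the
# authentication_method field accepts any member of the union defined above;
# SourceElasticsearchNone() or SourceElasticsearchAPIKeySecret(...) slot in
# the same way as the basic-auth variant shown here. Values are placeholders.
from airbyte_api.models import SourceElasticsearch, SourceElasticsearchUsernamePassword

config = SourceElasticsearch(
    endpoint='https://my-cluster.example.com:9200',  # placeholder URL
    authentication_method=SourceElasticsearchUsernamePassword(
        password='secret',   # placeholder
        username='elastic',  # placeholder
    ),
)
# Each variant pins its own METHOD constant ('basic', 'secret', or 'none'),
# which is how the serialized JSON discriminates between the union members.
# ---------------------------------------------------------------------------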
diff --git a/src/airbyte_api/models/source_everhour.py b/src/airbyte_api/models/source_everhour.py new file mode 100644 index 00000000..d0ac29ee --- /dev/null +++ b/src/airbyte_api/models/source_everhour.py @@ -0,0 +1,22 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Everhour(str, Enum): + EVERHOUR = 'everhour' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceEverhour: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Everhour API Key. See the docs for information on how to generate this key.""" + SOURCE_TYPE: Final[Everhour] = dataclasses.field(default=Everhour.EVERHOUR, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_facebook_marketing.py b/src/airbyte_api/models/source_facebook_marketing.py index 307f99f1..d2a8dcbc 100644 --- a/src/airbyte_api/models/source_facebook_marketing.py +++ b/src/airbyte_api/models/source_facebook_marketing.py @@ -53,7 +53,7 @@ class SourceFacebookMarketingSchemasAuthType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class ServiceAccountKeyAuthentication: +class SourceFacebookMarketingServiceAccountKeyAuthentication: access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) r"""The value of the generated access token. From your App’s Dashboard, click on \\"Marketing API\\" then \\"Tools\\". Select permissions ads_management, ads_read, read_insights, business_management. Then click on \\"Get token\\". See the docs for more information.""" AUTH_TYPE: Final[Optional[SourceFacebookMarketingSchemasAuthType]] = dataclasses.field(default=SourceFacebookMarketingSchemasAuthType.SERVICE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) @@ -91,24 +91,23 @@ class ValidActionBreakdowns(str, Enum): ACTION_TYPE = 'action_type' ACTION_VIDEO_SOUND = 'action_video_sound' ACTION_VIDEO_TYPE = 'action_video_type' + CONVERSION_DESTINATION = 'conversion_destination' + MATCHED_PERSONA_ID = 'matched_persona_id' + MATCHED_PERSONA_NAME = 'matched_persona_name' + SIGNAL_SOURCE_BUCKET = 'signal_source_bucket' STANDARD_EVENT_CONTENT_TYPE = 'standard_event_content_type' -class SourceFacebookMarketingActionReportTime(str, Enum): - r"""Determines the report time of action stats. For example, if a person saw the ad on Jan 1st but converted on Jan 2nd, when you query the API with action_report_time=impression, you see a conversion on Jan 1st. When you query the API with action_report_time=conversion, you see a conversion on Jan 2nd.""" - CONVERSION = 'conversion' - IMPRESSION = 'impression' - MIXED = 'mixed' - - class ValidBreakdowns(str, Enum): r"""An enumeration.""" AD_FORMAT_ASSET = 'ad_format_asset' AGE = 'age' APP_ID = 'app_id' BODY_ASSET = 'body_asset' + BREAKDOWN_REPORTING_AD_ID = 'breakdown_reporting_ad_id' CALL_TO_ACTION_ASSET = 'call_to_action_asset' COARSE_CONVERSION_VALUE = 'coarse_conversion_value' + CONVERSION_DESTINATION = 'conversion_destination' COUNTRY = 'country' DESCRIPTION_ASSET = 'description_asset' DEVICE_PLATFORM = 'device_platform' @@ -122,6 +121,7 @@ class ValidBreakdowns(str, Enum): IMAGE_ASSET = 'image_asset' IMPRESSION_DEVICE = 'impression_device' IS_CONVERSION_ID_MODELED = 'is_conversion_id_modeled' + IS_RENDERED_AS_DELAYED_SKIP_AD = 'is_rendered_as_delayed_skip_ad' LANDING_DESTINATION = 'landing_destination' LINK_URL_ASSET = 'link_url_asset' MARKETING_MESSAGES_BTN_NAME = 'marketing_messages_btn_name' @@ -132,6 +132,7 @@ class ValidBreakdowns(str, Enum): MEDIA_FORMAT = 'media_format' MEDIA_ORIGIN_URL = 'media_origin_url' MEDIA_TEXT_CONTENT = 'media_text_content' + MEDIA_TYPE = 'media_type' MMM = 'mmm' PLACE_PAGE_ID = 'place_page_id' PLATFORM_POSITION = 'platform_position' @@ -140,11 +141,19 @@ class ValidBreakdowns(str, Enum): PUBLISHER_PLATFORM = 'publisher_platform' REDOWNLOAD = 'redownload' REGION = 'region' + SIGNAL_SOURCE_BUCKET = 'signal_source_bucket' SKAN_CAMPAIGN_ID = 'skan_campaign_id' SKAN_CONVERSION_ID = 'skan_conversion_id' SKAN_VERSION = 'skan_version' + SOT_ATTRIBUTION_MODEL_TYPE = 'sot_attribution_model_type' + SOT_ATTRIBUTION_WINDOW = 'sot_attribution_window' + SOT_CHANNEL = 'sot_channel' + SOT_EVENT_TYPE = 'sot_event_type' + SOT_SOURCE = 'sot_source' STANDARD_EVENT_CONTENT_TYPE = 'standard_event_content_type' TITLE_ASSET = 'title_asset' + USER_PERSONA_ID = 'user_persona_id' + USER_PERSONA_NAME = 'user_persona_name' VIDEO_ASSET = 'video_asset' @@ -167,6 +176,7 @@ class SourceFacebookMarketingValidEnums(str, Enum): AUCTION_BID = 'auction_bid' AUCTION_COMPETITIVENESS = 'auction_competitiveness' AUCTION_MAX_COMPETITOR_BID = 'auction_max_competitor_bid' + AVERAGE_PURCHASES_CONVERSION_VALUE = 'average_purchases_conversion_value' BUYING_TYPE = 'buying_type' CAMPAIGN_ID = 'campaign_id' CAMPAIGN_NAME = 'campaign_name' @@ -234,9 +244,17 @@ class SourceFacebookMarketingValidEnums(str, Enum): LOCATION = 'location' MARKETING_MESSAGES_COST_PER_DELIVERED = 'marketing_messages_cost_per_delivered' MARKETING_MESSAGES_COST_PER_LINK_BTN_CLICK = 'marketing_messages_cost_per_link_btn_click' + MARKETING_MESSAGES_DELIVERY_RATE = 'marketing_messages_delivery_rate' + MARKETING_MESSAGES_LINK_BTN_CLICK_RATE = 'marketing_messages_link_btn_click_rate' + MARKETING_MESSAGES_MEDIA_VIEW_RATE = 'marketing_messages_media_view_rate' + MARKETING_MESSAGES_PHONE_CALL_BTN_CLICK_RATE = 'marketing_messages_phone_call_btn_click_rate' + MARKETING_MESSAGES_QUICK_REPLY_BTN_CLICK_RATE = 'marketing_messages_quick_reply_btn_click_rate' + MARKETING_MESSAGES_READ_RATE = 'marketing_messages_read_rate' MARKETING_MESSAGES_SPEND = 'marketing_messages_spend' + MARKETING_MESSAGES_WEBSITE_PURCHASE_VALUES = 'marketing_messages_website_purchase_values' MOBILE_APP_PURCHASE_ROAS = 'mobile_app_purchase_roas' OBJECTIVE = 'objective' + ONSITE_CONVERSION_MESSAGING_DETECTED_PURCHASE_DEDUPED = 'onsite_conversion_messaging_detected_purchase_deduped' OPTIMIZATION_GOAL = 'optimization_goal' OUTBOUND_CLICKS = 'outbound_clicks' OUTBOUND_CLICKS_CTR = 
'outbound_clicks_ctr' @@ -245,6 +263,7 @@ class SourceFacebookMarketingValidEnums(str, Enum): QUALIFYING_QUESTION_QUALIFY_ANSWER_RATE = 'qualifying_question_qualify_answer_rate' QUALITY_RANKING = 'quality_ranking' REACH = 'reach' + SHOPS_ASSISTED_PURCHASES = 'shops_assisted_purchases' SOCIAL_SPEND = 'social_spend' SPEND = 'spend' TOTAL_POSTBACKS = 'total_postbacks' @@ -299,8 +318,6 @@ class InsightConfig: r"""The name value of insight""" action_breakdowns: Optional[List[ValidActionBreakdowns]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('action_breakdowns'), 'exclude': lambda f: f is None }}) r"""A list of chosen action_breakdowns for action_breakdowns""" - action_report_time: Optional[SourceFacebookMarketingActionReportTime] = dataclasses.field(default=SourceFacebookMarketingActionReportTime.MIXED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('action_report_time'), 'exclude': lambda f: f is None }}) - r"""Determines the report time of action stats. For example, if a person saw the ad on Jan 1st but converted on Jan 2nd, when you query the API with action_report_time=impression, you see a conversion on Jan 1st. When you query the API with action_report_time=conversion, you see a conversion on Jan 2nd.""" breakdowns: Optional[List[ValidBreakdowns]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('breakdowns'), 'exclude': lambda f: f is None }}) r"""A list of chosen breakdowns for breakdowns""" end_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) @@ -358,4 +375,4 @@ class SourceFacebookMarketing: -SourceFacebookMarketingAuthentication = Union[AuthenticateViaFacebookMarketingOauth, ServiceAccountKeyAuthentication] +SourceFacebookMarketingAuthentication = Union[AuthenticateViaFacebookMarketingOauth, SourceFacebookMarketingServiceAccountKeyAuthentication]
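# ---------------------------------------------------------------------------
# Editor's note (illustrative sketch, not part of the generated diff): the
# rename of ServiceAccountKeyAuthentication above is a breaking change for
# code that imported the old, unprefixed name. Updated spelling, with a
# placeholder token:
from airbyte_api.models import SourceFacebookMarketingServiceAccountKeyAuthentication

credentials = SourceFacebookMarketingServiceAccountKeyAuthentication(
    access_token='EAAB...',  # placeholder Marketing API token, elided
)
# The SourceFacebookMarketingAuthentication union now lists this class in
# place of the old ServiceAccountKeyAuthentication.
# ---------------------------------------------------------------------------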
diff --git a/src/airbyte_api/models/source_facebook_pages.py b/src/airbyte_api/models/source_facebook_pages.py new file mode 100644 index 00000000..93c571e6 --- /dev/null +++ b/src/airbyte_api/models/source_facebook_pages.py @@ -0,0 +1,24 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class FacebookPages(str, Enum): + FACEBOOK_PAGES = 'facebook-pages' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceFacebookPages: + access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) + r"""Facebook Page Access Token""" + page_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('page_id') }}) + r"""Page ID""" + SOURCE_TYPE: Final[FacebookPages] = dataclasses.field(default=FacebookPages.FACEBOOK_PAGES, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_fastbill.py b/src/airbyte_api/models/source_fastbill.py new file mode 100644 index 00000000..99679989 --- /dev/null +++ b/src/airbyte_api/models/source_fastbill.py @@ -0,0 +1,24 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Fastbill(str, Enum): + FASTBILL = 'fastbill' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceFastbill: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Fastbill API key""" + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + r"""Username for Fastbill account""" + SOURCE_TYPE: Final[Fastbill] = dataclasses.field(default=Fastbill.FASTBILL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_fastly.py b/src/airbyte_api/models/source_fastly.py new file mode 100644 index 00000000..72a5ec1a --- /dev/null +++ b/src/airbyte_api/models/source_fastly.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Fastly(str, Enum): + FASTLY = 'fastly' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceFastly: + fastly_api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('fastly_api_token') }}) + r"""Your Fastly API token. You can generate this token in the Fastly web interface under Account Settings or via the Fastly API. 
Ensure the token has the appropriate scope for your use case.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Fastly] = dataclasses.field(default=Fastly.FASTLY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_fauna.py b/src/airbyte_api/models/source_fauna.py index 349ff772..35b151e5 100644 --- a/src/airbyte_api/models/source_fauna.py +++ b/src/airbyte_api/models/source_fauna.py @@ -8,7 +8,7 @@ from typing import Final, Optional, Union -class SourceFaunaSchemasDeletionMode(str, Enum): +class SourceFaunaDeletionMode(str, Enum): DELETED_FIELD = 'deleted_field' @@ -17,19 +17,19 @@ class SourceFaunaSchemasDeletionMode(str, Enum): class Enabled: column: Optional[str] = dataclasses.field(default='deleted_at', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('column'), 'exclude': lambda f: f is None }}) r"""Name of the \\"deleted at\\" column.""" - DELETION_MODE: Final[SourceFaunaSchemasDeletionMode] = dataclasses.field(default=SourceFaunaSchemasDeletionMode.DELETED_FIELD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('deletion_mode') }}) + DELETION_MODE: Final[SourceFaunaDeletionMode] = dataclasses.field(default=SourceFaunaDeletionMode.DELETED_FIELD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('deletion_mode') }}) -class SourceFaunaDeletionMode(str, Enum): +class SourceFaunaSchemasDeletionMode(str, Enum): IGNORE = 'ignore' @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class Disabled: - DELETION_MODE: Final[SourceFaunaDeletionMode] = dataclasses.field(default=SourceFaunaDeletionMode.IGNORE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('deletion_mode') }}) + DELETION_MODE: Final[SourceFaunaSchemasDeletionMode] = dataclasses.field(default=SourceFaunaSchemasDeletionMode.IGNORE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('deletion_mode') }}) diff --git a/src/airbyte_api/models/source_file.py b/src/airbyte_api/models/source_file.py index 43bb578c..f0374bbd 100644 --- a/src/airbyte_api/models/source_file.py +++ b/src/airbyte_api/models/source_file.py @@ -8,7 +8,7 @@ from typing import Final, Optional, Union -class SourceFileFileFormat(str, Enum): +class FileFormat(str, Enum): r"""The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs).""" CSV = 'csv' JSON = 'json' @@ -158,7 +158,7 @@ class SourceFile: r"""The storage Provider or Location of the file(s) which should be replicated.""" url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url') }}) r"""The URL path to access the file which should be replicated.""" - format: Optional[SourceFileFileFormat] = dataclasses.field(default=SourceFileFileFormat.CSV, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format'), 'exclude': lambda f: f is None }}) + format: Optional[FileFormat] = dataclasses.field(default=FileFormat.CSV, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format'), 'exclude': lambda f: f is None }}) r"""The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs).""" reader_options: Optional[str] = dataclasses.field(default=None, 
metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('reader_options'), 'exclude': lambda f: f is None }}) r"""This should be a string in JSON format. It depends on the chosen file format to provide additional options and tune its behavior.""" diff --git a/src/airbyte_api/models/source_fullstory.py b/src/airbyte_api/models/source_fullstory.py new file mode 100644 index 00000000..51a2e02d --- /dev/null +++ b/src/airbyte_api/models/source_fullstory.py @@ -0,0 +1,24 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Fullstory(str, Enum): + FULLSTORY = 'fullstory' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceFullstory: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""API Key for the fullstory.com API.""" + uid: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('uid') }}) + r"""User ID for the fullstory.com API.""" + SOURCE_TYPE: Final[Fullstory] = dataclasses.field(default=Fullstory.FULLSTORY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_gcs.py b/src/airbyte_api/models/source_gcs.py index a40395e1..ccccf34a 100644 --- a/src/airbyte_api/models/source_gcs.py +++ b/src/airbyte_api/models/source_gcs.py @@ -30,7 +30,7 @@ class SourceGcsAuthType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class AuthenticateViaGoogleOAuth: +class SourceGcsAuthenticateViaGoogleOAuth: access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) r"""Access Token""" client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) @@ -117,7 +117,7 @@ class SourceGcsParsingStrategy(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class UnstructuredDocumentFormat: +class SourceGcsUnstructuredDocumentFormat: r"""Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.""" FILETYPE: Final[Optional[SourceGcsSchemasStreamsFormatFormatFiletype]] = dataclasses.field(default=SourceGcsSchemasStreamsFormatFormatFiletype.UNSTRUCTURED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) processing: Optional[SourceGcsProcessing] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('processing'), 'exclude': lambda f: f is None }}) @@ -295,10 +295,10 @@ class SourceGcs: -SourceGcsAuthentication = Union[AuthenticateViaGoogleOAuth, ServiceAccountAuthentication] +SourceGcsAuthentication = Union[SourceGcsAuthenticateViaGoogleOAuth, ServiceAccountAuthentication] SourceGcsProcessing = Union[SourceGcsLocal, ViaAPI] SourceGcsCSVHeaderDefinition = Union[SourceGcsFromCSV, SourceGcsAutogenerated, SourceGcsUserProvided] -SourceGcsFormat = Union[SourceGcsAvroFormat, SourceGcsCSVFormat, SourceGcsJsonlFormat, SourceGcsParquetFormat, UnstructuredDocumentFormat, ExcelFormat] +SourceGcsFormat = Union[SourceGcsAvroFormat, SourceGcsCSVFormat, SourceGcsJsonlFormat, SourceGcsParquetFormat, SourceGcsUnstructuredDocumentFormat, ExcelFormat]
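# ---------------------------------------------------------------------------
# Editor's note (illustrative sketch, not part of the generated diff): the GCS
# OAuth and unstructured-format classes gain a SourceGcs prefix; behavior is
# unchanged, so upgrading is an import-level rename. Import path assumed to be
# airbyte_api.models.
from airbyte_api.models import SourceGcsUnstructuredDocumentFormat

fmt = SourceGcsUnstructuredDocumentFormat()  # FILETYPE still defaults to 'unstructured'
# Before this change the same class was imported as UnstructuredDocumentFormat;
# update imports when upgrading to this release.
# ---------------------------------------------------------------------------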
diff --git a/src/airbyte_api/models/source_giphy.py b/src/airbyte_api/models/source_giphy.py new file mode 100644 index 00000000..fafd2748 --- /dev/null +++ b/src/airbyte_api/models/source_giphy.py @@ -0,0 +1,33 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Giphy(str, Enum): + GIPHY = 'giphy' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceGiphy: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Your GIPHY API Key. You can create and find your API key in the GIPHY Developer Dashboard at https://developers.giphy.com/dashboard/.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + query: Optional[str] = dataclasses.field(default='foo', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('query'), 'exclude': lambda f: f is None }}) + r"""A query for search endpoint""" + query_for_clips: Optional[str] = dataclasses.field(default='foo', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('query_for_clips'), 'exclude': lambda f: f is None }}) + r"""Query for clips search endpoint""" + query_for_gif: Optional[str] = dataclasses.field(default='foo', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('query_for_gif'), 'exclude': lambda f: f is None }}) + r"""Query for gif search endpoint""" + query_for_stickers: Optional[str] = dataclasses.field(default='foo', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('query_for_stickers'), 'exclude': lambda f: f is None }}) + r"""Query for stickers search endpoint""" + SOURCE_TYPE: Final[Giphy] = dataclasses.field(default=Giphy.GIPHY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_github.py b/src/airbyte_api/models/source_github.py index 5e15d04f..0b21fccc 100644 --- a/src/airbyte_api/models/source_github.py +++ b/src/airbyte_api/models/source_github.py @@ -30,7 +30,7 @@ class OptionTitle(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class OAuth: +class SourceGithubOAuth: access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) r"""OAuth access token""" client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id'), 'exclude': lambda f: f is None }}) @@ -65,4 +65,4 @@ class SourceGithub: -SourceGithubAuthentication = Union[OAuth, SourceGithubPersonalAccessToken] +SourceGithubAuthentication = Union[SourceGithubOAuth, SourceGithubPersonalAccessToken]
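# ---------------------------------------------------------------------------
# Editor's note (illustrative sketch, not part of the generated diff): OAuth
# becomes SourceGithubOAuth in the SourceGithubAuthentication union above.
# Sketch with a placeholder token; client_id / client_secret stay optional,
# as in the unrenamed class.
from airbyte_api.models import SourceGithubOAuth

credentials = SourceGithubOAuth(access_token='ghp_xxxxxxxx')  # placeholder token
# ---------------------------------------------------------------------------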
diff --git a/src/airbyte_api/models/source_gologin.py b/src/airbyte_api/models/source_gologin.py new file mode 100644 index 00000000..5f61b5e3 --- /dev/null +++ b/src/airbyte_api/models/source_gologin.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Gologin(str, Enum): + GOLOGIN = 'gologin' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceGologin: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""API Key found at `https://app.gologin.com/personalArea/TokenApi`""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Gologin] = dataclasses.field(default=Gologin.GOLOGIN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_google_analytics_data_api.py b/src/airbyte_api/models/source_google_analytics_data_api.py index 84725410..107ddf51 100644 --- a/src/airbyte_api/models/source_google_analytics_data_api.py +++ b/src/airbyte_api/models/source_google_analytics_data_api.py @@ -1562,6 +1562,8 @@ class SourceGoogleAnalyticsDataAPI: r"""Credentials for the service""" custom_reports_array: Optional[List[SourceGoogleAnalyticsDataAPICustomReportConfig]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('custom_reports_array'), 'exclude': lambda f: f is None }}) r"""You can add your Custom Analytics report by creating one.""" + date_ranges_end_date: Optional[date] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('date_ranges_end_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'exclude': lambda f: f is None }}) + r"""The end date up to which to replicate report data in the format YYYY-MM-DD. Data generated after this date will not be included in the report. Not applied to custom Cohort reports. When no date is provided or the date is in the future, today's date is used.""" date_ranges_start_date: Optional[date] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('date_ranges_start_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'exclude': lambda f: f is None }}) r"""The start date from which to replicate report data in the format YYYY-MM-DD. Data generated before this date will not be included in the report. Not applied to custom Cohort reports.""" keep_empty_rows: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('keep_empty_rows'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_google_drive.py b/src/airbyte_api/models/source_google_drive.py index 0fe8f92b..65c968fc 100644 --- a/src/airbyte_api/models/source_google_drive.py +++ b/src/airbyte_api/models/source_google_drive.py @@ -42,10 +42,67 @@ class SourceGoogleDriveAuthenticateViaGoogleOAuth: + + +class SourceGoogleDriveSchemasDeliveryType(str, Enum): + USE_PERMISSIONS_TRANSFER = 'use_permissions_transfer' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class ReplicatePermissionsACL: + r"""Sends one identity stream and one or more permissions (ACL) streams to the destination. 
This data can be used in downstream systems to recreate permission restrictions mirroring the original source.""" + DELIVERY_TYPE: Final[Optional[SourceGoogleDriveSchemasDeliveryType]] = dataclasses.field(default=SourceGoogleDriveSchemasDeliveryType.USE_PERMISSIONS_TRANSFER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delivery_type'), 'exclude': lambda f: f is None }}) + domain: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain'), 'exclude': lambda f: f is None }}) + r"""The Google domain of the identities.""" + include_identities_stream: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('include_identities_stream'), 'exclude': lambda f: f is None }}) + r"""This data can be used in downstream systems to recreate permission restrictions mirroring the original source""" + + + + +class SourceGoogleDriveDeliveryType(str, Enum): + USE_FILE_TRANSFER = 'use_file_transfer' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class CopyRawFiles: + r"""Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.""" + DELIVERY_TYPE: Final[Optional[SourceGoogleDriveDeliveryType]] = dataclasses.field(default=SourceGoogleDriveDeliveryType.USE_FILE_TRANSFER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delivery_type'), 'exclude': lambda f: f is None }}) + preserve_directory_structure: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('preserve_directory_structure'), 'exclude': lambda f: f is None }}) + r"""If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled.""" + + + + +class DeliveryType(str, Enum): + USE_RECORDS_TRANSFER = 'use_records_transfer' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class ReplicateRecords: + r"""Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. 
Data can be flattened, typed and deduped depending on the destination.""" + DELIVERY_TYPE: Final[Optional[DeliveryType]] = dataclasses.field(default=DeliveryType.USE_RECORDS_TRANSFER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delivery_type'), 'exclude': lambda f: f is None }}) + + + + class SourceGoogleDriveGoogleDrive(str, Enum): GOOGLE_DRIVE = 'google-drive' +class SourceGoogleDriveSchemasStreamsFormatFormat6Filetype(str, Enum): + EXCEL = 'excel' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceGoogleDriveExcelFormat: + FILETYPE: Final[Optional[SourceGoogleDriveSchemasStreamsFormatFormat6Filetype]] = dataclasses.field(default=SourceGoogleDriveSchemasStreamsFormatFormat6Filetype.EXCEL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) + + + + class SourceGoogleDriveSchemasStreamsFormatFormatFiletype(str, Enum): UNSTRUCTURED = 'unstructured' @@ -73,7 +130,7 @@ class SourceGoogleDriveParsingStrategy(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class SourceGoogleDriveDocumentFileTypeFormatExperimental: +class SourceGoogleDriveUnstructuredDocumentFormat: r"""Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.""" FILETYPE: Final[Optional[SourceGoogleDriveSchemasStreamsFormatFormatFiletype]] = dataclasses.field(default=SourceGoogleDriveSchemasStreamsFormatFormatFiletype.UNSTRUCTURED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) processing: Optional[SourceGoogleDriveProcessing] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('processing'), 'exclude': lambda f: f is None }}) @@ -222,6 +279,8 @@ class SourceGoogleDriveFileBasedStreamConfig: r"""The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.""" input_schema: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('input_schema'), 'exclude': lambda f: f is None }}) r"""The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.""" + recent_n_files_to_read_for_schema_discovery: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('recent_n_files_to_read_for_schema_discovery'), 'exclude': lambda f: f is None }}) + r"""The number of recent files which will be used to discover the schema for this stream.""" schemaless: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schemaless'), 'exclude': lambda f: f is None }}) r"""When enabled, syncs will not validate or structure records against the stream's schema.""" validation_policy: Optional[SourceGoogleDriveValidationPolicy] = dataclasses.field(default=SourceGoogleDriveValidationPolicy.EMIT_RECORD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('validation_policy'), 'exclude': lambda f: f is None }}) @@ -242,6 +301,7 @@ class SourceGoogleDrive: r"""URL for the folder you want to sync. 
Using individual streams and glob patterns, it's possible to only sync a subset of all files located in the folder.""" streams: List[SourceGoogleDriveFileBasedStreamConfig] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('streams') }}) r"""Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.""" + delivery_method: Optional[DeliveryMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delivery_method'), 'exclude': lambda f: f is None }}) SOURCE_TYPE: Final[SourceGoogleDriveGoogleDrive] = dataclasses.field(default=SourceGoogleDriveGoogleDrive.GOOGLE_DRIVE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) r"""UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.""" @@ -250,8 +310,10 @@ class SourceGoogleDrive: SourceGoogleDriveAuthentication = Union[SourceGoogleDriveAuthenticateViaGoogleOAuth, SourceGoogleDriveServiceAccountKeyAuthentication] +DeliveryMethod = Union[ReplicateRecords, CopyRawFiles, ReplicatePermissionsACL] + SourceGoogleDriveProcessing = Union[SourceGoogleDriveLocal] SourceGoogleDriveCSVHeaderDefinition = Union[SourceGoogleDriveFromCSV, SourceGoogleDriveAutogenerated, SourceGoogleDriveUserProvided] -SourceGoogleDriveFormat = Union[SourceGoogleDriveAvroFormat, SourceGoogleDriveCSVFormat, SourceGoogleDriveJsonlFormat, SourceGoogleDriveParquetFormat, SourceGoogleDriveDocumentFileTypeFormatExperimental] +SourceGoogleDriveFormat = Union[SourceGoogleDriveAvroFormat, SourceGoogleDriveCSVFormat, SourceGoogleDriveJsonlFormat, SourceGoogleDriveParquetFormat, SourceGoogleDriveUnstructuredDocumentFormat, SourceGoogleDriveExcelFormat] diff --git a/src/airbyte_api/models/source_google_search_console.py b/src/airbyte_api/models/source_google_search_console.py index cc7400a8..e31bb7e0 100644 --- a/src/airbyte_api/models/source_google_search_console.py +++ b/src/airbyte_api/models/source_google_search_console.py @@ -79,19 +79,23 @@ class SourceGoogleSearchConsoleGoogleSearchConsole(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceGoogleSearchConsole: - authorization: AuthenticationType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('authorization') }}) + authorization: SourceGoogleSearchConsoleAuthenticationType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('authorization') }}) site_urls: List[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('site_urls') }}) r"""The URLs of the website property attached to your GSC account. 
Learn more about properties here.""" + always_use_aggregation_type_auto: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('always_use_aggregation_type_auto'), 'exclude': lambda f: f is None }}) + r"""Some search analytics streams fail with a 400 error if the specified `aggregationType` is not supported. This is customer implementation dependent and if this error is encountered, enable this setting which will override the existing `aggregationType` to use `auto` which should resolve the stream errors.""" custom_reports_array: Optional[List[SourceGoogleSearchConsoleCustomReportConfig]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('custom_reports_array'), 'exclude': lambda f: f is None }}) r"""You can add your Custom Analytics report by creating one.""" data_state: Optional[DataFreshness] = dataclasses.field(default=DataFreshness.FINAL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_state'), 'exclude': lambda f: f is None }}) r"""If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation.""" end_date: Optional[date] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'exclude': lambda f: f is None }}) r"""UTC date in the format YYYY-MM-DD. Any data created after this date will not be replicated. Must be greater or equal to the start date field. Leaving this field blank will replicate all data from the start date onward.""" + num_workers: Optional[int] = dataclasses.field(default=40, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('num_workers'), 'exclude': lambda f: f is None }}) + r"""The number of worker threads to use for the sync. For more details on Google Search Console rate limits, refer to the docs.""" SOURCE_TYPE: Final[SourceGoogleSearchConsoleGoogleSearchConsole] = dataclasses.field(default=SourceGoogleSearchConsoleGoogleSearchConsole.GOOGLE_SEARCH_CONSOLE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) start_date: Optional[date] = dataclasses.field(default=dateutil.parser.parse('2021-01-01').date(), metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'exclude': lambda f: f is None }}) r"""UTC date in the format YYYY-MM-DD. 
Any data before this date will not be replicated.""" -AuthenticationType = Union[SourceGoogleSearchConsoleOAuth, SourceGoogleSearchConsoleServiceAccountKeyAuthentication] +SourceGoogleSearchConsoleAuthenticationType = Union[SourceGoogleSearchConsoleOAuth, SourceGoogleSearchConsoleServiceAccountKeyAuthentication] diff --git a/src/airbyte_api/models/source_google_sheets.py b/src/airbyte_api/models/source_google_sheets.py index 8373442a..4dc75637 100644 --- a/src/airbyte_api/models/source_google_sheets.py +++ b/src/airbyte_api/models/source_google_sheets.py @@ -5,7 +5,7 @@ from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json from enum import Enum -from typing import Final, Optional, Union +from typing import Final, List, Optional, Union class SourceGoogleSheetsSchemasAuthType(str, Enum): @@ -44,6 +44,17 @@ class SourceGoogleSheetsGoogleSheets(str, Enum): GOOGLE_SHEETS = 'google-sheets' +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class StreamNameOverrides: + custom_stream_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('custom_stream_name') }}) + r"""The name you want this stream to appear as in Airbyte and your destination.""" + source_stream_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('source_stream_name') }}) + r"""The exact name of the sheet/tab in your Google Spreadsheet.""" + + + + @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceGoogleSheets: @@ -51,11 +62,35 @@ class SourceGoogleSheets: r"""Credentials for connecting to the Google Sheets API""" spreadsheet_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('spreadsheet_id') }}) r"""Enter the link to the Google spreadsheet you want to sync. To copy the link, click the 'Share' button in the top-right corner of the spreadsheet, then click 'Copy link'.""" - batch_size: Optional[int] = dataclasses.field(default=200, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('batch_size'), 'exclude': lambda f: f is None }}) - r"""Default value is 200. An integer representing row batch size for each sent request to Google Sheets API. Row batch size means how many rows are processed from the google sheet, for example default value 200 would process rows 1-201, then 201-401 and so on. Based on Google Sheets API limits documentation, it is possible to send up to 300 requests per minute, but each individual request has to be processed under 180 seconds, otherwise the request returns a timeout error. In regards to this information, consider network speed and number of columns of the google sheet when deciding a batch_size value. Default value should cover most of the cases, but if a google sheet has over 100,000 records or more, consider increasing batch_size value.""" + allow_leading_numbers: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('allow_leading_numbers'), 'exclude': lambda f: f is None }}) + r"""Allows column names to start with numbers. Example: \\"50th Percentile\\" → \\"50_th_percentile\\" This option will only work if \\"Convert Column Names to SQL-Compliant Format (names_conversion)\\" is enabled.""" + batch_size: Optional[int] = dataclasses.field(default=1000000, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('batch_size'), 'exclude': lambda f: f is None }}) + r"""Default value is 1000000. 
An integer representing row batch size for each sent request to Google Sheets API. Row batch size means how many rows are processed from the google sheet, for example default value 1000000 would process rows 2-1000002, then 1000003-2000003 and so on. Based on Google Sheets API limits documentation, it is possible to send up to 300 requests per minute, but each individual request has to be processed under 180 seconds, otherwise the request returns a timeout error. In regards to this information, consider network speed and number of columns of the google sheet when deciding a batch_size value.""" + combine_letter_number_pairs: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('combine_letter_number_pairs'), 'exclude': lambda f: f is None }}) + r"""Combines adjacent letters and numbers. Example: \\"Q3 2023\\" → \\"q3_2023\\" This option will only work if \\"Convert Column Names to SQL-Compliant Format (names_conversion)\\" is enabled.""" + combine_number_word_pairs: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('combine_number_word_pairs'), 'exclude': lambda f: f is None }}) + r"""Combines adjacent numbers and words. Example: \\"50th Percentile?\\" → \\"_50th_percentile_\\" This option will only work if \\"Convert Column Names to SQL-Compliant Format (names_conversion)\\" is enabled.""" names_conversion: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('names_conversion'), 'exclude': lambda f: f is None }}) - r"""Enables the conversion of column names to a standardized, SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this option if your destination is SQL-based.""" + r"""Converts column names to a SQL-compliant format (snake_case, lowercase, etc). If enabled, you can further customize the sanitization using the options below.""" + remove_leading_trailing_underscores: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('remove_leading_trailing_underscores'), 'exclude': lambda f: f is None }}) + r"""Removes leading and trailing underscores from column names. Does not remove leading underscores from column names that start with a number. Example: \\"50th Percentile? \\"→ \\"_50_th_percentile\\" This option will only work if \\"Convert Column Names to SQL-Compliant Format (names_conversion)\\" is enabled.""" + remove_special_characters: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('remove_special_characters'), 'exclude': lambda f: f is None }}) + r"""Removes all special characters from column names. Example: \\"Example ID*\\" → \\"example_id\\" This option will only work if \\"Convert Column Names to SQL-Compliant Format (names_conversion)\\" is enabled.""" SOURCE_TYPE: Final[SourceGoogleSheetsGoogleSheets] = dataclasses.field(default=SourceGoogleSheetsGoogleSheets.GOOGLE_SHEETS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + stream_name_overrides: Optional[List[StreamNameOverrides]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('stream_name_overrides'), 'exclude': lambda f: f is None }}) + r"""**Overridden streams will default to Sync Mode: Full Refresh (Append), which does not support primary keys. 
If you want to use primary keys and deduplication, update the sync mode to \\"Full Refresh | Overwrite + Deduped\\" in your connection settings.** + Allows you to rename streams (Google Sheet tab names) as they appear in Airbyte. + Each item should be an object with a `source_stream_name` (the exact name of the sheet/tab in your spreadsheet) and a `custom_stream_name` (the name you want it to appear as in Airbyte and the destination). + If a `source_stream_name` is not found in your spreadsheet, it will be ignored and the default name will be used. This feature only affects stream (sheet/tab) names, not field/column names. + If you want to rename fields or column names, you can do so using the Airbyte Mappings feature after your connection is created. See the Airbyte documentation for more details on how to use Mappings. + Examples: + - To rename a sheet called \"Sheet1\" to \"sales_data\", and \"2024 Q1\" to \"q1_2024\": + [ + { \"source_stream_name\": \"Sheet1\", \"custom_stream_name\": \"sales_data\" }, + { \"source_stream_name\": \"2024 Q1\", \"custom_stream_name\": \"q1_2024\" } + ] + - If you do not wish to rename any streams, leave this blank. + """
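# ---------------------------------------------------------------------------
# Editor's note (illustrative sketch, not part of the generated diff): the new
# stream_name_overrides option on SourceGoogleSheets, mirroring the example in
# the docstring above. The list is passed alongside the existing required
# fields (credentials and spreadsheet_id, omitted here as placeholders).
from airbyte_api.models import StreamNameOverrides

overrides = [
    StreamNameOverrides(source_stream_name='Sheet1', custom_stream_name='sales_data'),
    StreamNameOverrides(source_stream_name='2024 Q1', custom_stream_name='q1_2024'),
]
# SourceGoogleSheets(..., stream_name_overrides=overrides) renames only the
# streams (sheet/tab names), never field or column names.
# ---------------------------------------------------------------------------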
diff --git a/src/airbyte_api/models/source_greythr.py b/src/airbyte_api/models/source_greythr.py new file mode 100644 index 00000000..3671aef5 --- /dev/null +++ b/src/airbyte_api/models/source_greythr.py @@ -0,0 +1,26 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional + + +class Greythr(str, Enum): + GREYTHR = 'greythr' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceGreythr: + base_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('base_url') }}) + r"""https://api.greythr.com""" + domain: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain') }}) + r"""Your GreytHR Host URL""" + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) + SOURCE_TYPE: Final[Greythr] = dataclasses.field(default=Greythr.GREYTHR, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_harness.py b/src/airbyte_api/models/source_harness.py new file mode 100644 index 00000000..1e401ce0 --- /dev/null +++ b/src/airbyte_api/models/source_harness.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional + + +class Harness(str, Enum): + HARNESS = 'harness' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceHarness: + account_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('account_id') }}) + r"""Harness Account ID""" + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + api_url: Optional[str] = dataclasses.field(default='https://app.harness.io', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_url'), 'exclude': lambda f: f is None }}) + r"""The API URL for fetching data from Harness""" + SOURCE_TYPE: Final[Harness] = dataclasses.field(default=Harness.HARNESS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_hellobaton.py b/src/airbyte_api/models/source_hellobaton.py new file mode 100644 index 00000000..3d0676d2 --- /dev/null +++ b/src/airbyte_api/models/source_hellobaton.py @@ -0,0 +1,24 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Hellobaton(str, Enum): + HELLOBATON = 'hellobaton' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceHellobaton: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Authentication key required to access the API endpoints""" + company: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('company') }}) + r"""Company name that generates your base API URL""" + SOURCE_TYPE: Final[Hellobaton] = dataclasses.field(default=Hellobaton.HELLOBATON, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_help_scout.py b/src/airbyte_api/models/source_help_scout.py new file mode 100644 index 00000000..e05b82e4 --- /dev/null +++ b/src/airbyte_api/models/source_help_scout.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class HelpScout(str, Enum): + HELP_SCOUT = 'help-scout' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceHelpScout: + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[HelpScout] = dataclasses.field(default=HelpScout.HELP_SCOUT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_hoorayhr.py b/src/airbyte_api/models/source_hoorayhr.py new file mode 100644 index 00000000..acf33de0 --- /dev/null +++ b/src/airbyte_api/models/source_hoorayhr.py @@ -0,0 +1,22 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Hoorayhr(str, Enum): + HOORAYHR = 'hoorayhr' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceHoorayhr: + hoorayhrpassword: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('hoorayhrpassword') }}) + hoorayhrusername: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('hoorayhrusername') }}) + SOURCE_TYPE: Final[Hoorayhr] = dataclasses.field(default=Hoorayhr.HOORAYHR, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_hubspot.py b/src/airbyte_api/models/source_hubspot.py index 92bdfa20..0a5ac039 100644 --- a/src/airbyte_api/models/source_hubspot.py +++ b/src/airbyte_api/models/source_hubspot.py @@ -57,6 +57,8 @@ class SourceHubspot: r"""Choose how to authenticate to HubSpot.""" enable_experimental_streams: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('enable_experimental_streams'), 'exclude': lambda f: f is None }}) r"""If enabled then experimental streams become available for sync.""" + num_worker: Optional[int] = dataclasses.field(default=3, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('num_worker'), 'exclude': lambda f: f is None }}) + r"""The number of worker threads to use for the sync.""" SOURCE_TYPE: Final[SourceHubspotHubspot] = dataclasses.field(default=SourceHubspotHubspot.HUBSPOT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. 
If not set, \\"2006-06-01T00:00:00Z\\" (Hubspot creation date) will be used as start date. It's recommended to provide relevant to your data start date value to optimize synchronization.""" diff --git a/src/airbyte_api/models/source_hugging_face_datasets.py b/src/airbyte_api/models/source_hugging_face_datasets.py new file mode 100644 index 00000000..e32e923b --- /dev/null +++ b/src/airbyte_api/models/source_hugging_face_datasets.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Any, Final, List, Optional + + +class HuggingFaceDatasets(str, Enum): + HUGGING_FACE_DATASETS = 'hugging-face-datasets' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceHuggingFaceDatasets: + dataset_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataset_name') }}) + dataset_splits: Optional[List[Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataset_splits'), 'exclude': lambda f: f is None }}) + r"""Splits to import. Will import all of them if nothing is provided (see https://huggingface.co/docs/dataset-viewer/en/configs_and_splits for more details)""" + dataset_subsets: Optional[List[Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataset_subsets'), 'exclude': lambda f: f is None }}) + r"""Dataset Subsets to import. Will import all of them if nothing is provided (see https://huggingface.co/docs/dataset-viewer/en/configs_and_splits for more details)""" + SOURCE_TYPE: Final[HuggingFaceDatasets] = dataclasses.field(default=HuggingFaceDatasets.HUGGING_FACE_DATASETS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_huntr.py b/src/airbyte_api/models/source_huntr.py new file mode 100644 index 00000000..4d28b69b --- /dev/null +++ b/src/airbyte_api/models/source_huntr.py @@ -0,0 +1,21 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Huntr(str, Enum): + HUNTR = 'huntr' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceHuntr: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + SOURCE_TYPE: Final[Huntr] = dataclasses.field(default=Huntr.HUNTR, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_imagga.py b/src/airbyte_api/models/source_imagga.py new file mode 100644 index 00000000..ce4a7817 --- /dev/null +++ b/src/airbyte_api/models/source_imagga.py @@ -0,0 +1,26 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional + + +class Imagga(str, Enum): + IMAGGA = 'imagga' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceImagga: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Your Imagga API key, available in your Imagga dashboard. Could be found at `https://imagga.com/profile/dashboard`""" + api_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_secret') }}) + r"""Your Imagga API secret, available in your Imagga dashboard. Could be found at `https://imagga.com/profile/dashboard`""" + img_for_detection: Optional[str] = dataclasses.field(default='https://imagga.com/static/images/categorization/child-476506_640.jpg', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('img_for_detection'), 'exclude': lambda f: f is None }}) + r"""An image for detection endpoints""" + SOURCE_TYPE: Final[Imagga] = dataclasses.field(default=Imagga.IMAGGA, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_insightful.py b/src/airbyte_api/models/source_insightful.py new file mode 100644 index 00000000..0daa6faf --- /dev/null +++ b/src/airbyte_api/models/source_insightful.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Insightful(str, Enum): + INSIGHTFUL = 'insightful' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceInsightful: + api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }}) + r"""Your API token for accessing the Insightful API. Generate it by logging in as an Admin to your organization's account, navigating to the API page, and creating a new token. Note that this token will only be shown once, so store it securely.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Insightful] = dataclasses.field(default=Insightful.INSIGHTFUL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_instagram.py b/src/airbyte_api/models/source_instagram.py index 747a3252..994b80f3 100644 --- a/src/airbyte_api/models/source_instagram.py +++ b/src/airbyte_api/models/source_instagram.py @@ -19,6 +19,12 @@ class SourceInstagramInstagram(str, Enum): class SourceInstagram: access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) r"""The value of the access token generated with instagram_basic, instagram_manage_insights, pages_show_list, pages_read_engagement, Instagram Public Content Access permissions. 
See the docs for more information""" + client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id'), 'exclude': lambda f: f is None }}) + r"""The Client ID for your Oauth application""" + client_secret: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret'), 'exclude': lambda f: f is None }}) + r"""The Client Secret for your Oauth application""" + num_workers: Optional[int] = dataclasses.field(default=15, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('num_workers'), 'exclude': lambda f: f is None }}) + r"""The number of worker threads to use for the sync.""" SOURCE_TYPE: Final[SourceInstagramInstagram] = dataclasses.field(default=SourceInstagramInstagram.INSTAGRAM, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) r"""The date from which you'd like to replicate data for User Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. If left blank, the start date will be set to 2 years before the present date.""" diff --git a/src/airbyte_api/models/source_intercom.py b/src/airbyte_api/models/source_intercom.py index 202ccffb..3b05ab5a 100644 --- a/src/airbyte_api/models/source_intercom.py +++ b/src/airbyte_api/models/source_intercom.py @@ -10,7 +10,7 @@ from typing import Final, Optional -class SourceIntercomIntercom(str, Enum): +class Intercom(str, Enum): INTERCOM = 'intercom' @@ -29,6 +29,6 @@ class SourceIntercom: r"""Client Secret for your Intercom application.""" lookback_window: Optional[int] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_window'), 'exclude': lambda f: f is None }}) r"""The number of days to shift the state value backward for record sync""" - SOURCE_TYPE: Final[SourceIntercomIntercom] = dataclasses.field(default=SourceIntercomIntercom.INTERCOM, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + SOURCE_TYPE: Final[Intercom] = dataclasses.field(default=Intercom.INTERCOM, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_intruder.py b/src/airbyte_api/models/source_intruder.py new file mode 100644 index 00000000..f0c0317f --- /dev/null +++ b/src/airbyte_api/models/source_intruder.py @@ -0,0 +1,22 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Intruder(str, Enum): + INTRUDER = 'intruder' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceIntruder: + access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) + r"""Your API Access token. 
See here.""" + SOURCE_TYPE: Final[Intruder] = dataclasses.field(default=Intruder.INTRUDER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_jamf_pro.py b/src/airbyte_api/models/source_jamf_pro.py new file mode 100644 index 00000000..637f8665 --- /dev/null +++ b/src/airbyte_api/models/source_jamf_pro.py @@ -0,0 +1,24 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional + + +class JamfPro(str, Enum): + JAMF_PRO = 'jamf-pro' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceJamfPro: + subdomain: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('subdomain') }}) + r"""The unique subdomain for your Jamf Pro instance.""" + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) + SOURCE_TYPE: Final[JamfPro] = dataclasses.field(default=JamfPro.JAMF_PRO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_jira.py b/src/airbyte_api/models/source_jira.py index 8c67eb4f..d9d11f49 100644 --- a/src/airbyte_api/models/source_jira.py +++ b/src/airbyte_api/models/source_jira.py @@ -23,8 +23,6 @@ class SourceJira: r"""The Domain for your Jira account, e.g. airbyteio.atlassian.net, airbyteio.jira.com, jira.your-domain.com""" email: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('email') }}) r"""The user email for your Jira account which you used to generate the API token. This field is used for Authorization to your account by BasicAuth.""" - enable_experimental_streams: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('enable_experimental_streams'), 'exclude': lambda f: f is None }}) - r"""Allow the use of experimental streams which rely on undocumented Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables for more info.""" lookback_window_minutes: Optional[int] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_window_minutes'), 'exclude': lambda f: f is None }}) r"""When set to N, the connector will always refresh resources created within the past N minutes. 
By default, updated objects that are not newly created are not incrementally synced.""" num_workers: Optional[int] = dataclasses.field(default=3, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('num_workers'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_jotform.py b/src/airbyte_api/models/source_jotform.py index 8580b3ce..9b1b3819 100644 --- a/src/airbyte_api/models/source_jotform.py +++ b/src/airbyte_api/models/source_jotform.py @@ -10,7 +10,7 @@ from typing import Final, Optional, Union -class SourceJotformSchemasAPIEndpoint(str, Enum): +class SourceJotformAPIEndpoint(str, Enum): ENTERPRISE = 'enterprise' @@ -19,12 +19,12 @@ class SourceJotformSchemasAPIEndpoint(str, Enum): class Enterprise: enterprise_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('enterprise_url') }}) r"""Upgrade to Enterprise to make your API url your-domain.com/API or subdomain.jotform.com/API instead of api.jotform.com""" - API_ENDPOINT: Final[Optional[SourceJotformSchemasAPIEndpoint]] = dataclasses.field(default=SourceJotformSchemasAPIEndpoint.ENTERPRISE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_endpoint'), 'exclude': lambda f: f is None }}) + API_ENDPOINT: Final[Optional[SourceJotformAPIEndpoint]] = dataclasses.field(default=SourceJotformAPIEndpoint.ENTERPRISE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_endpoint'), 'exclude': lambda f: f is None }}) -class SourceJotformAPIEndpoint(str, Enum): +class SourceJotformSchemasAPIEndpoint(str, Enum): BASIC = 'basic' @@ -38,7 +38,7 @@ class BaseURLPrefix(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class Basic: - API_ENDPOINT: Final[Optional[SourceJotformAPIEndpoint]] = dataclasses.field(default=SourceJotformAPIEndpoint.BASIC, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_endpoint'), 'exclude': lambda f: f is None }}) + API_ENDPOINT: Final[Optional[SourceJotformSchemasAPIEndpoint]] = dataclasses.field(default=SourceJotformSchemasAPIEndpoint.BASIC, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_endpoint'), 'exclude': lambda f: f is None }}) url_prefix: Optional[BaseURLPrefix] = dataclasses.field(default=BaseURLPrefix.STANDARD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url_prefix'), 'exclude': lambda f: f is None }}) r"""You can access our API through the following URLs - Standard API Usage (Use the default API URL - https://api.jotform.com), For EU (Use the EU API URL - https://eu-api.jotform.com), For HIPAA (Use the HIPAA API URL - https://hipaa-api.jotform.com)""" diff --git a/src/airbyte_api/models/source_judge_me_reviews.py b/src/airbyte_api/models/source_judge_me_reviews.py new file mode 100644 index 00000000..157f1c00 --- /dev/null +++ b/src/airbyte_api/models/source_judge_me_reviews.py @@ -0,0 +1,26 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
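
The Jotform hunks above swap the names of the two `api_endpoint` enums while leaving the `Basic` and `Enterprise` variant classes themselves intact, so configs keep working and only direct enum imports break. Choosing a variant might look like the following sketch, with class exports assumed from `airbyte_api.models` and a placeholder enterprise URL:

```python
# Illustrative: the two generated Jotform endpoint variants from this diff.
from airbyte_api.models import Basic, BaseURLPrefix, Enterprise

# Standard usage against api.jotform.com (or the EU/HIPAA prefixes):
endpoint = Basic(url_prefix=BaseURLPrefix.STANDARD)

# Enterprise deployments point at their own domain instead:
endpoint = Enterprise(enterprise_url="https://your-domain.com/API")
```
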
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class JudgeMeReviews(str, Enum): + JUDGE_ME_REVIEWS = 'judge-me-reviews' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceJudgeMeReviews: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + shop_domain: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('shop_domain') }}) + r"""example.myshopify.com""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[JudgeMeReviews] = dataclasses.field(default=JudgeMeReviews.JUDGE_ME_REVIEWS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_keka.py b/src/airbyte_api/models/source_keka.py new file mode 100644 index 00000000..4f633941 --- /dev/null +++ b/src/airbyte_api/models/source_keka.py @@ -0,0 +1,27 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Keka(str, Enum): + KEKA = 'keka' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceKeka: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + r"""Your client identifier for authentication.""" + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + r"""Your client secret for secure authentication.""" + grant_type: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('grant_type') }}) + scope: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('scope') }}) + SOURCE_TYPE: Final[Keka] = dataclasses.field(default=Keka.KEKA, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_klaviyo.py b/src/airbyte_api/models/source_klaviyo.py index 2b1a0d1d..9cc32263 100644 --- a/src/airbyte_api/models/source_klaviyo.py +++ b/src/airbyte_api/models/source_klaviyo.py @@ -22,7 +22,7 @@ class SourceKlaviyo: disable_fetching_predictive_analytics: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('disable_fetching_predictive_analytics'), 'exclude': lambda f: f is None }}) r"""Certain streams like the profiles stream can retrieve predictive analytics data from Klaviyo's API. However, at high volume, this can lead to service availability issues on the API which can be improved by not fetching this field. 
WARNING: Enabling this setting will stop the \\"predictive_analytics\\" column from being populated in your downstream destination.""" num_workers: Optional[int] = dataclasses.field(default=10, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('num_workers'), 'exclude': lambda f: f is None }}) - r"""The number of worker threads to use for the sync. The performance upper boundary is based on the limit of your Chargebee plan. More info about the rate limit plan tiers can be found on Chargebee's API docs.""" + r"""The number of worker threads to use for the sync. The performance upper boundary is based on the limit of your Klaviyo plan. More info about the rate limit plan tiers can be found on Klaviyo's API docs.""" SOURCE_TYPE: Final[Klaviyo] = dataclasses.field(default=Klaviyo.KLAVIYO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. This field is optional - if not provided, all data will be replicated.""" diff --git a/src/airbyte_api/models/source_linear.py b/src/airbyte_api/models/source_linear.py new file mode 100644 index 00000000..b253ca34 --- /dev/null +++ b/src/airbyte_api/models/source_linear.py @@ -0,0 +1,21 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Linear(str, Enum): + LINEAR = 'linear' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceLinear: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + SOURCE_TYPE: Final[Linear] = dataclasses.field(default=Linear.LINEAR, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_linkedin_ads.py b/src/airbyte_api/models/source_linkedin_ads.py index 7fdff697..0165ebac 100644 --- a/src/airbyte_api/models/source_linkedin_ads.py +++ b/src/airbyte_api/models/source_linkedin_ads.py @@ -103,6 +103,8 @@ class SourceLinkedinAds: credentials: Optional[SourceLinkedinAdsAuthentication] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) lookback_window: Optional[int] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_window'), 'exclude': lambda f: f is None }}) r"""How far into the past to look for records. (in days)""" + num_workers: Optional[int] = dataclasses.field(default=3, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('num_workers'), 'exclude': lambda f: f is None }}) + r"""The number of workers to use for the connector. This is used to limit the number of concurrent requests to the LinkedIn Ads API. 
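
The Klaviyo docstring correction above matters because `num_workers` is a real throughput knob: the field defaults to 10, and the sensible ceiling comes from the rate limits of your Klaviyo plan, not Chargebee's. A hedged tuning sketch; `api_key` is assumed to be the model's required credential field, which is not shown in this hunk:

```python
# Illustrative: raising worker concurrency for a higher-tier Klaviyo plan.
from airbyte_api.models import SourceKlaviyo

cfg = SourceKlaviyo(
    api_key="pk_************",  # placeholder private key (assumed field)
    num_workers=15,             # default is 10; past your plan's limit this only buys 429s
)
```
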
If not set, the default is 3 workers.""" SOURCE_TYPE: Final[SourceLinkedinAdsLinkedinAds] = dataclasses.field(default=SourceLinkedinAdsLinkedinAds.LINKEDIN_ADS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_mailchimp.py b/src/airbyte_api/models/source_mailchimp.py index 76b7d615..004c619e 100644 --- a/src/airbyte_api/models/source_mailchimp.py +++ b/src/airbyte_api/models/source_mailchimp.py @@ -16,7 +16,7 @@ class SourceMailchimpSchemasAuthType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class APIKey: +class SourceMailchimpAPIKey: apikey: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('apikey') }}) r"""Mailchimp API Key. See the docs for information on how to generate this key.""" AUTH_TYPE: Final[SourceMailchimpSchemasAuthType] = dataclasses.field(default=SourceMailchimpSchemasAuthType.APIKEY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }}) @@ -56,4 +56,4 @@ class SourceMailchimp: -SourceMailchimpAuthentication = Union[SourceMailchimpOAuth20, APIKey] +SourceMailchimpAuthentication = Union[SourceMailchimpOAuth20, SourceMailchimpAPIKey] diff --git a/src/airbyte_api/models/source_mailersend.py b/src/airbyte_api/models/source_mailersend.py new file mode 100644 index 00000000..e2c80460 --- /dev/null +++ b/src/airbyte_api/models/source_mailersend.py @@ -0,0 +1,26 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional + + +class Mailersend(str, Enum): + MAILERSEND = 'mailersend' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceMailersend: + api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }}) + r"""Your API Token. See here.""" + domain_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain_id') }}) + r"""The domain entity in mailersend""" + SOURCE_TYPE: Final[Mailersend] = dataclasses.field(default=Mailersend.MAILERSEND, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + start_date: Optional[float] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'exclude': lambda f: f is None }}) + r"""Timestamp is assumed to be UTC.""" + + diff --git a/src/airbyte_api/models/source_mendeley.py b/src/airbyte_api/models/source_mendeley.py new file mode 100644 index 00000000..e9f956cd --- /dev/null +++ b/src/airbyte_api/models/source_mendeley.py @@ -0,0 +1,33 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Mendeley(str, Enum): + MENDELEY = 'mendeley' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceMendeley: + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + r"""Could be found at `https://dev.mendeley.com/myapps.html`""" + client_refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_refresh_token') }}) + r"""Use cURL or Postman with the OAuth 2.0 Authorization tab. Set the Auth URL to https://api.mendeley.com/oauth/authorize, the Token URL to https://api.mendeley.com/oauth/token, and use all as the scope.""" + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + r"""Could be found at `https://dev.mendeley.com/myapps.html`""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + name_for_institution: Optional[str] = dataclasses.field(default='City University', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name_for_institution'), 'exclude': lambda f: f is None }}) + r"""The name parameter for institutions search""" + query_for_catalog: Optional[str] = dataclasses.field(default='Polar Bear', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('query_for_catalog'), 'exclude': lambda f: f is None }}) + r"""Query for catalog search""" + SOURCE_TYPE: Final[Mendeley] = dataclasses.field(default=Mendeley.MENDELEY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_mercado_ads.py b/src/airbyte_api/models/source_mercado_ads.py new file mode 100644 index 00000000..aa5c1805 --- /dev/null +++ b/src/airbyte_api/models/source_mercado_ads.py @@ -0,0 +1,29 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import date +from enum import Enum +from typing import Final, Optional + + +class MercadoAds(str, Enum): + MERCADO_ADS = 'mercado-ads' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceMercadoAds: + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + client_refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_refresh_token') }}) + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + end_date: Optional[date] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'exclude': lambda f: f is None }}) + r"""Cannot exceed 90 days from current day for Product Ads""" + lookback_days: Optional[float] = dataclasses.field(default=7, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_days'), 'exclude': lambda f: f is None }}) + SOURCE_TYPE: Final[MercadoAds] = dataclasses.field(default=MercadoAds.MERCADO_ADS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + start_date: Optional[date] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'exclude': lambda f: f is None }}) + r"""Cannot exceed 90 days from current day for Product Ads, and 90 days from \\"End Date\\" on Brand and Display Ads""" + + diff --git a/src/airbyte_api/models/source_merge.py b/src/airbyte_api/models/source_merge.py new file mode 100644 index 00000000..28626ce5 --- /dev/null +++ b/src/airbyte_api/models/source_merge.py @@ -0,0 +1,28 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Merge(str, Enum): + MERGE = 'merge' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceMerge: + account_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('account_token') }}) + r"""Link your other integrations with account credentials on accounts section to get account token (ref - https://app.merge.dev/linked-accounts/accounts)""" + api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }}) + r"""API token can be seen at https://app.merge.dev/keys""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + r"""Date time filter for incremental filter, Specify which date to extract from.""" + SOURCE_TYPE: Final[Merge] = dataclasses.field(default=Merge.MERGE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_microsoft_sharepoint.py b/src/airbyte_api/models/source_microsoft_sharepoint.py index 514e5ac4..11cf8192 100644 --- a/src/airbyte_api/models/source_microsoft_sharepoint.py +++ b/src/airbyte_api/models/source_microsoft_sharepoint.py @@ -56,6 +56,34 @@ class SourceMicrosoftSharepointAuthenticateViaMicrosoftOAuth: +class SourceMicrosoftSharepointSchemasDeliveryType(str, Enum): + USE_FILE_TRANSFER = 'use_file_transfer' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceMicrosoftSharepointCopyRawFiles: + r"""Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.""" + DELIVERY_TYPE: Final[Optional[SourceMicrosoftSharepointSchemasDeliveryType]] = dataclasses.field(default=SourceMicrosoftSharepointSchemasDeliveryType.USE_FILE_TRANSFER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delivery_type'), 'exclude': lambda f: f is None }}) + preserve_directory_structure: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('preserve_directory_structure'), 'exclude': lambda f: f is None }}) + r"""If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled.""" + + + + +class SourceMicrosoftSharepointDeliveryType(str, Enum): + USE_RECORDS_TRANSFER = 'use_records_transfer' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceMicrosoftSharepointReplicateRecords: + r"""Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. 
Data can be flattened, typed and deduped depending on the destination.""" + DELIVERY_TYPE: Final[Optional[SourceMicrosoftSharepointDeliveryType]] = dataclasses.field(default=SourceMicrosoftSharepointDeliveryType.USE_RECORDS_TRANSFER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delivery_type'), 'exclude': lambda f: f is None }}) + + + + class SourceMicrosoftSharepointSearchScope(str, Enum): r"""Specifies the location(s) to search for files. Valid options are 'ACCESSIBLE_DRIVES' for all SharePoint drives the user can access, 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to search both.""" ACCESSIBLE_DRIVES = 'ACCESSIBLE_DRIVES' @@ -275,10 +303,13 @@ class SourceMicrosoftSharepoint: r"""Credentials for connecting to the One Drive API""" streams: List[SourceMicrosoftSharepointFileBasedStreamConfig] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('streams') }}) r"""Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.""" + delivery_method: Optional[SourceMicrosoftSharepointDeliveryMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delivery_method'), 'exclude': lambda f: f is None }}) folder_path: Optional[str] = dataclasses.field(default='.', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('folder_path'), 'exclude': lambda f: f is None }}) r"""Path to a specific folder within the drives to search for files. Leave empty to search all folders of the drives. This does not apply to shared items.""" search_scope: Optional[SourceMicrosoftSharepointSearchScope] = dataclasses.field(default=SourceMicrosoftSharepointSearchScope.ALL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('search_scope'), 'exclude': lambda f: f is None }}) r"""Specifies the location(s) to search for files. Valid options are 'ACCESSIBLE_DRIVES' for all SharePoint drives the user can access, 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to search both.""" + site_url: Optional[str] = dataclasses.field(default='', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('site_url'), 'exclude': lambda f: f is None }}) + r"""Url of SharePoint site to search for files. Leave empty to search in the main site. Use 'https://.sharepoint.com/sites/' to iterate over all sites.""" SOURCE_TYPE: Final[SourceMicrosoftSharepointMicrosoftSharepoint] = dataclasses.field(default=SourceMicrosoftSharepointMicrosoftSharepoint.MICROSOFT_SHAREPOINT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) r"""UTC date and time in the format 2017-01-25T00:00:00.000000Z. 
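
The two classes above are the arms of the new `SourceMicrosoftSharepointDeliveryMethod` union aliased later in this file's diff: structured record replication versus verbatim file copy. Selecting a delivery method is just a matter of which dataclass you construct, as in this sketch (credentials and stream configuration elided, exports assumed):

```python
# Illustrative: both arms of the new delivery_method union from this diff.
from airbyte_api.models import (
    SourceMicrosoftSharepointCopyRawFiles,
    SourceMicrosoftSharepointReplicateRecords,
)

# Recommended structured replication; every field has a default.
delivery = SourceMicrosoftSharepointReplicateRecords()

# Or byte-for-byte file transfer, keeping the source folder hierarchy.
delivery = SourceMicrosoftSharepointCopyRawFiles(preserve_directory_structure=True)
```
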
Any file modified before this date will not be replicated.""" @@ -287,6 +318,8 @@ class SourceMicrosoftSharepoint: SourceMicrosoftSharepointAuthentication = Union[SourceMicrosoftSharepointAuthenticateViaMicrosoftOAuth, SourceMicrosoftSharepointServiceKeyAuthentication] +SourceMicrosoftSharepointDeliveryMethod = Union[SourceMicrosoftSharepointReplicateRecords, SourceMicrosoftSharepointCopyRawFiles] + SourceMicrosoftSharepointProcessing = Union[SourceMicrosoftSharepointLocal] SourceMicrosoftSharepointCSVHeaderDefinition = Union[SourceMicrosoftSharepointFromCSV, SourceMicrosoftSharepointAutogenerated, SourceMicrosoftSharepointUserProvided] diff --git a/src/airbyte_api/models/source_mixpanel.py b/src/airbyte_api/models/source_mixpanel.py index 029d66e7..9f114256 100644 --- a/src/airbyte_api/models/source_mixpanel.py +++ b/src/airbyte_api/models/source_mixpanel.py @@ -63,6 +63,8 @@ class SourceMixpanel: r"""Defines window size in days, that used to slice through data. You can reduce it, if amount of data in each window is too big for your environment. (This value should be positive integer)""" end_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) r"""The date in the format YYYY-MM-DD. Any data after this date will not be replicated. Left empty to always sync to most recent date""" + export_lookback_window: Optional[int] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('export_lookback_window'), 'exclude': lambda f: f is None }}) + r"""The number of seconds to look back from the last synced timestamp during incremental syncs of the Export stream. This ensures no data is missed due to delays in event recording. Default is 0 seconds. Must be a non-negative integer.""" page_size: Optional[int] = dataclasses.field(default=1000, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('page_size'), 'exclude': lambda f: f is None }}) r"""The number of records to fetch per request for the engage stream. Default is 1000. 
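
The new Mixpanel `export_lookback_window` above trades duplicate reads for completeness: each incremental sync of the Export stream re-reads that many seconds before the last synced timestamp, so events recorded late are not skipped. A hedged sketch, with credential fields elided since they are not part of this hunk:

```python
# Illustrative: one hour of lookback for late-arriving Mixpanel events.
from airbyte_api.models import SourceMixpanel

cfg = SourceMixpanel(
    export_lookback_window=3600,  # seconds; the default of 0 means no re-read
)
```
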
If you are experiencing long sync times with this stream, try increasing this value.""" project_timezone: Optional[str] = dataclasses.field(default='US/Pacific', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('project_timezone'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_mode.py b/src/airbyte_api/models/source_mode.py index d1b1448a..83140e50 100644 --- a/src/airbyte_api/models/source_mode.py +++ b/src/airbyte_api/models/source_mode.py @@ -8,7 +8,7 @@ from typing import Final -class Mode(str, Enum): +class SourceModeMode(str, Enum): MODE = 'mode' @@ -20,6 +20,6 @@ class SourceMode: api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }}) r"""API token to use as the username for Basic Authentication.""" workspace: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workspace') }}) - SOURCE_TYPE: Final[Mode] = dataclasses.field(default=Mode.MODE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + SOURCE_TYPE: Final[SourceModeMode] = dataclasses.field(default=SourceModeMode.MODE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_monday.py b/src/airbyte_api/models/source_monday.py index ce326077..518d76a8 100644 --- a/src/airbyte_api/models/source_monday.py +++ b/src/airbyte_api/models/source_monday.py @@ -5,7 +5,7 @@ from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json from enum import Enum -from typing import Final, Optional, Union +from typing import Final, List, Optional, Union class SourceMondaySchemasAuthType(str, Enum): @@ -49,7 +49,11 @@ class SourceMondayMonday(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceMonday: + board_ids: Optional[List[int]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('board_ids'), 'exclude': lambda f: f is None }}) + r"""The IDs of the boards that the Items and Boards streams will extract records from. 
When left empty, streams will extract records from all boards that exist within the account.""" credentials: Optional[SourceMondayAuthorizationMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) + num_workers: Optional[int] = dataclasses.field(default=4, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('num_workers'), 'exclude': lambda f: f is None }}) + r"""The number of worker threads to use for the sync.""" SOURCE_TYPE: Final[SourceMondayMonday] = dataclasses.field(default=SourceMondayMonday.MONDAY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_mongodb_v2.py b/src/airbyte_api/models/source_mongodb_v2.py index 3d61b45c..d0858262 100644 --- a/src/airbyte_api/models/source_mongodb_v2.py +++ b/src/airbyte_api/models/source_mongodb_v2.py @@ -5,10 +5,10 @@ from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json from enum import Enum -from typing import Any, Dict, Final, Optional, Union +from typing import Any, Dict, Final, List, Optional, Union -class SourceMongodbV2SchemasClusterType(str, Enum): +class SourceMongodbV2ClusterType(str, Enum): SELF_MANAGED_REPLICA_SET = 'SELF_MANAGED_REPLICA_SET' @@ -18,12 +18,12 @@ class SelfManagedReplicaSet: r"""MongoDB self-hosted cluster configured as a replica set""" connection_string: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('connection_string') }}) r"""The connection string of the cluster that you want to replicate. https://www.mongodb.com/docs/manual/reference/connection-string/#find-your-self-hosted-deployment-s-connection-string for more information.""" - database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }}) - r"""The name of the MongoDB database that contains the collection(s) to replicate.""" + databases: List[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('databases') }}) + r"""The names of the MongoDB databases that contain the collection(s) to replicate.""" additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) auth_source: Optional[str] = dataclasses.field(default='admin', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_source'), 'exclude': lambda f: f is None }}) r"""The authentication source where the user information is stored.""" - CLUSTER_TYPE: Final[SourceMongodbV2SchemasClusterType] = dataclasses.field(default=SourceMongodbV2SchemasClusterType.SELF_MANAGED_REPLICA_SET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cluster_type') }}) + CLUSTER_TYPE: Final[SourceMongodbV2ClusterType] = dataclasses.field(default=SourceMongodbV2ClusterType.SELF_MANAGED_REPLICA_SET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cluster_type') }}) password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) r"""The password associated with this username.""" schema_enforced: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schema_enforced'), 'exclude': lambda f: f is None }}) @@ -34,7 +34,7 @@ class SelfManagedReplicaSet: 
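
Together, the new `board_ids` and `num_workers` fields let a Monday sync be both scoped and parallelized, while leaving `board_ids` as None preserves today's sync-everything behavior. A sketch with placeholder IDs, credentials elided and the export path assumed:

```python
# Illustrative: restricting the Items and Boards streams to two boards.
from airbyte_api.models import SourceMonday

cfg = SourceMonday(
    board_ids=[1234567890, 2345678901],  # placeholder IDs; None means all boards
    num_workers=8,                       # default is 4 per the field definition
)
```
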
-class SourceMongodbV2ClusterType(str, Enum): +class SourceMongodbV2SchemasClusterType(str, Enum): ATLAS_REPLICA_SET = 'ATLAS_REPLICA_SET' @@ -44,8 +44,8 @@ class MongoDBAtlasReplicaSet: r"""MongoDB Atlas-hosted cluster configured as a replica set""" connection_string: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('connection_string') }}) r"""The connection string of the cluster that you want to replicate.""" - database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }}) - r"""The name of the MongoDB database that contains the collection(s) to replicate.""" + databases: List[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('databases') }}) + r"""The names of the MongoDB databases that contain the collection(s) to replicate.""" password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }}) r"""The password associated with this username.""" username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) @@ -53,7 +53,7 @@ class MongoDBAtlasReplicaSet: additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) auth_source: Optional[str] = dataclasses.field(default='admin', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_source'), 'exclude': lambda f: f is None }}) r"""The authentication source where the user information is stored. See https://www.mongodb.com/docs/manual/reference/connection-string/#mongodb-urioption-urioption.authSource for more details.""" - CLUSTER_TYPE: Final[SourceMongodbV2ClusterType] = dataclasses.field(default=SourceMongodbV2ClusterType.ATLAS_REPLICA_SET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cluster_type') }}) + CLUSTER_TYPE: Final[SourceMongodbV2SchemasClusterType] = dataclasses.field(default=SourceMongodbV2SchemasClusterType.ATLAS_REPLICA_SET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cluster_type') }}) schema_enforced: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schema_enforced'), 'exclude': lambda f: f is None }}) r"""When enabled, syncs will validate and structure records against the stream's schema.""" @@ -83,6 +83,8 @@ class SourceMongodbV2: r"""Configures the MongoDB cluster type.""" discover_sample_size: Optional[int] = dataclasses.field(default=10000, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('discover_sample_size'), 'exclude': lambda f: f is None }}) r"""The maximum number of documents to sample when attempting to discover the unique fields for a collection.""" + discover_timeout_seconds: Optional[int] = dataclasses.field(default=600, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('discover_timeout_seconds'), 'exclude': lambda f: f is None }}) + r"""The amount of time the connector will wait when it discovers a document. Defaults to 600 seconds. 
Valid range: 5 seconds to 1200 seconds.""" initial_load_timeout_hours: Optional[int] = dataclasses.field(default=8, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('initial_load_timeout_hours'), 'exclude': lambda f: f is None }}) r"""The amount of time an initial load is allowed to continue for before catching up on CDC logs.""" initial_waiting_seconds: Optional[int] = dataclasses.field(default=300, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('initial_waiting_seconds'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_mssql.py b/src/airbyte_api/models/source_mssql.py index cceaacf3..a62faea4 100644 --- a/src/airbyte_api/models/source_mssql.py +++ b/src/airbyte_api/models/source_mssql.py @@ -16,6 +16,8 @@ class SourceMssqlSchemasMethod(str, Enum): @dataclasses.dataclass class ScanChangesWithUserDefinedCursor: r"""Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).""" + exclude_todays_data: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('exclude_todays_data'), 'exclude': lambda f: f is None }}) + r"""When enabled incremental syncs using a cursor of a temporal types (date or datetime) will include cursor values only up until last midnight (Advanced)""" METHOD: Final[SourceMssqlSchemasMethod] = dataclasses.field(default=SourceMssqlSchemasMethod.STANDARD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }}) @@ -52,7 +54,7 @@ class SourceMssqlMssql(str, Enum): MSSQL = 'mssql' -class SourceMssqlSchemasSSLMethodSSLMethodSSLMethod(str, Enum): +class SourceMssqlSchemasSslMethodSslMethod(str, Enum): ENCRYPTED_VERIFY_CERTIFICATE = 'encrypted_verify_certificate' @@ -64,12 +66,12 @@ class SourceMssqlEncryptedVerifyCertificate: r"""certificate of the server, or of the CA that signed the server certificate""" host_name_in_certificate: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('hostNameInCertificate'), 'exclude': lambda f: f is None }}) r"""Specifies the host name of the server. The value of this property must match the subject property of the certificate.""" - SSL_METHOD: Final[SourceMssqlSchemasSSLMethodSSLMethodSSLMethod] = dataclasses.field(default=SourceMssqlSchemasSSLMethodSSLMethodSSLMethod.ENCRYPTED_VERIFY_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method') }}) + SSL_METHOD: Final[SourceMssqlSchemasSslMethodSslMethod] = dataclasses.field(default=SourceMssqlSchemasSslMethodSslMethod.ENCRYPTED_VERIFY_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method') }}) -class SourceMssqlSchemasSslMethodSslMethod(str, Enum): +class SourceMssqlSchemasSslMethod(str, Enum): ENCRYPTED_TRUST_SERVER_CERTIFICATE = 'encrypted_trust_server_certificate' @@ -77,12 +79,12 @@ class SourceMssqlSchemasSslMethodSslMethod(str, Enum): @dataclasses.dataclass class SourceMssqlEncryptedTrustServerCertificate: r"""Use the certificate provided by the server without verification. 
(For testing purposes only!)""" - SSL_METHOD: Final[SourceMssqlSchemasSslMethodSslMethod] = dataclasses.field(default=SourceMssqlSchemasSslMethodSslMethod.ENCRYPTED_TRUST_SERVER_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method') }}) + SSL_METHOD: Final[SourceMssqlSchemasSslMethod] = dataclasses.field(default=SourceMssqlSchemasSslMethod.ENCRYPTED_TRUST_SERVER_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method') }}) -class SourceMssqlSchemasSslMethod(str, Enum): +class SourceMssqlSchemasSSLMethodSSLMethodSSLMethod(str, Enum): UNENCRYPTED = 'unencrypted' @@ -90,7 +92,7 @@ class SourceMssqlSchemasSslMethod(str, Enum): @dataclasses.dataclass class SourceMssqlUnencrypted: r"""Data transfer will not be encrypted.""" - SSL_METHOD: Final[SourceMssqlSchemasSslMethod] = dataclasses.field(default=SourceMssqlSchemasSslMethod.UNENCRYPTED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method') }}) + SSL_METHOD: Final[SourceMssqlSchemasSSLMethodSSLMethodSSLMethod] = dataclasses.field(default=SourceMssqlSchemasSSLMethodSSLMethodSSLMethod.UNENCRYPTED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method') }}) diff --git a/src/airbyte_api/models/source_mysql.py b/src/airbyte_api/models/source_mysql.py index bfe238f4..07839818 100644 --- a/src/airbyte_api/models/source_mysql.py +++ b/src/airbyte_api/models/source_mysql.py @@ -5,70 +5,71 @@ from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json from enum import Enum -from typing import Final, Optional, Union - - -class SourceMysqlSchemasMethod(str, Enum): - STANDARD = 'STANDARD' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class SourceMysqlScanChangesWithUserDefinedCursor: - r"""Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).""" - METHOD: Final[SourceMysqlSchemasMethod] = dataclasses.field(default=SourceMysqlSchemasMethod.STANDARD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }}) - - +from typing import Any, Dict, Final, Optional, Union class SourceMysqlInvalidCDCPositionBehaviorAdvanced(str, Enum): - r"""Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value into the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.""" + r"""Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value in the mined logs. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.""" FAIL_SYNC = 'Fail sync' RE_SYNC_DATA = 'Re-sync data' -class SourceMysqlMethod(str, Enum): +class SourceMysqlSchemasMethod(str, Enum): CDC = 'CDC' @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class ReadChangesUsingBinaryLogCDC: - r"""Recommended - Incrementally reads new inserts, updates, and deletes using the MySQL binary log. 
This must be enabled on your database.""" +class SourceMysqlReadChangesUsingChangeDataCaptureCDC: + r"""Recommended - Incrementally reads new inserts, updates, and deletes using MySQL's change data capture feature. This must be enabled on your database.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) initial_load_timeout_hours: Optional[int] = dataclasses.field(default=8, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('initial_load_timeout_hours'), 'exclude': lambda f: f is None }}) r"""The amount of time an initial load is allowed to continue for before catching up on CDC logs.""" - initial_waiting_seconds: Optional[int] = dataclasses.field(default=300, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('initial_waiting_seconds'), 'exclude': lambda f: f is None }}) - r"""The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.""" invalid_cdc_cursor_position_behavior: Optional[SourceMysqlInvalidCDCPositionBehaviorAdvanced] = dataclasses.field(default=SourceMysqlInvalidCDCPositionBehaviorAdvanced.FAIL_SYNC, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('invalid_cdc_cursor_position_behavior'), 'exclude': lambda f: f is None }}) - r"""Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value into the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.""" - METHOD: Final[SourceMysqlMethod] = dataclasses.field(default=SourceMysqlMethod.CDC, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }}) - server_time_zone: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('server_time_zone'), 'exclude': lambda f: f is None }}) + r"""Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value in the mined logs. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.""" + method: Optional[SourceMysqlSchemasMethod] = dataclasses.field(default=SourceMysqlSchemasMethod.CDC, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method'), 'exclude': lambda f: f is None }}) + server_timezone: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('server_timezone'), 'exclude': lambda f: f is None }}) r"""Enter the configured MySQL server timezone. This should only be done if the configured timezone in your MySQL instance does not conform to IANNA standard.""" +class SourceMysqlMethod(str, Enum): + STANDARD = 'STANDARD' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceMysqlScanChangesWithUserDefinedCursor: + r"""Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. 
created_at, updated_at).""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + method: Optional[SourceMysqlMethod] = dataclasses.field(default=SourceMysqlMethod.STANDARD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method'), 'exclude': lambda f: f is None }}) + + + + class SourceMysqlMysql(str, Enum): MYSQL = 'mysql' -class SourceMysqlSchemasSSLModeSSLModesMode(str, Enum): +class SourceMysqlSchemasSslModeEncryptionMode(str, Enum): VERIFY_IDENTITY = 'verify_identity' @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class VerifyIdentity: - r"""Always connect with SSL. Verify both CA and Hostname.""" + r"""To always require encryption and verify that the source has a valid SSL certificate.""" ca_certificate: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ca_certificate') }}) r"""CA certificate""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) client_certificate: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_certificate'), 'exclude': lambda f: f is None }}) - r"""Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)""" + r"""Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)""" client_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_key'), 'exclude': lambda f: f is None }}) - r"""Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)""" + r"""Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)""" client_key_password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_key_password'), 'exclude': lambda f: f is None }}) r"""Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.""" - MODE: Final[SourceMysqlSchemasSSLModeSSLModesMode] = dataclasses.field(default=SourceMysqlSchemasSSLModeSSLModesMode.VERIFY_IDENTITY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }}) + mode: Optional[SourceMysqlSchemasSslModeEncryptionMode] = dataclasses.field(default=SourceMysqlSchemasSslModeEncryptionMode.VERIFY_IDENTITY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) @@ -79,17 +80,18 @@ class SourceMysqlSchemasSslModeMode(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class SourceMysqlVerifyCA: - r"""Always connect with SSL. 
Verifies CA, but allows connection even if Hostname does not match.""" +class SourceMysqlVerifyCa: + r"""To always require encryption and verify that the source has a valid SSL certificate.""" ca_certificate: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ca_certificate') }}) r"""CA certificate""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) client_certificate: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_certificate'), 'exclude': lambda f: f is None }}) - r"""Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)""" + r"""Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)""" client_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_key'), 'exclude': lambda f: f is None }}) - r"""Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)""" + r"""Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)""" client_key_password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_key_password'), 'exclude': lambda f: f is None }}) r"""Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.""" - MODE: Final[SourceMysqlSchemasSslModeMode] = dataclasses.field(default=SourceMysqlSchemasSslModeMode.VERIFY_CA, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }}) + mode: Optional[SourceMysqlSchemasSslModeMode] = dataclasses.field(default=SourceMysqlSchemasSslModeMode.VERIFY_CA, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) @@ -101,8 +103,9 @@ class SourceMysqlSchemasMode(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class Required: - r"""Always connect with SSL. If the MySQL server doesn’t support SSL, the connection will not be established. Certificate Authority (CA) and Hostname are not verified.""" - MODE: Final[SourceMysqlSchemasMode] = dataclasses.field(default=SourceMysqlSchemasMode.REQUIRED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }}) + r"""To always require encryption. Note: The connection will fail if the source doesn't support encryption.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + mode: Optional[SourceMysqlSchemasMode] = dataclasses.field(default=SourceMysqlSchemasMode.REQUIRED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) @@ -114,28 +117,29 @@ class SourceMysqlMode(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class Preferred: - r"""Automatically attempt SSL connection. 
If the MySQL server does not support SSL, continue with a regular connection.""" - MODE: Final[SourceMysqlMode] = dataclasses.field(default=SourceMysqlMode.PREFERRED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode') }}) + r"""To allow unencrypted communication only when the source doesn't support encryption.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + mode: Optional[SourceMysqlMode] = dataclasses.field(default=SourceMysqlMode.PREFERRED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) class SourceMysqlSchemasTunnelMethodTunnelMethod(str, Enum): - r"""Connect through a jump server tunnel host using username and password authentication""" SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH' @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceMysqlPasswordAuthentication: + r"""Connect through a jump server tunnel host using username and password authentication""" tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }}) r"""Hostname of the jump server host that allows inbound ssh tunnel.""" tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }}) r"""OS-level username for logging into the jump server host""" tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }}) r"""OS-level password for logging into the jump server host""" - TUNNEL_METHOD: Final[SourceMysqlSchemasTunnelMethodTunnelMethod] = dataclasses.field(default=SourceMysqlSchemasTunnelMethodTunnelMethod.SSH_PASSWORD_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }}) - r"""Connect through a jump server tunnel host using username and password authentication""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[SourceMysqlSchemasTunnelMethodTunnelMethod] = dataclasses.field(default=SourceMysqlSchemasTunnelMethodTunnelMethod.SSH_PASSWORD_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) tunnel_port: Optional[int] = dataclasses.field(default=22, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port'), 'exclude': lambda f: f is None }}) r"""Port on the proxy/jump server that accepts inbound ssh connections.""" @@ -143,21 +147,21 @@ class SourceMysqlPasswordAuthentication: class SourceMysqlSchemasTunnelMethod(str, Enum): - r"""Connect through a jump server tunnel host using username and ssh key""" SSH_KEY_AUTH = 'SSH_KEY_AUTH' @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceMysqlSSHKeyAuthentication: + r"""Connect through a jump server tunnel host using username and ssh key""" ssh_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssh_key') }}) r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )""" tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }}) r"""Hostname of the jump server host that allows inbound ssh tunnel.""" 
tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }}) - r"""OS-level username for logging into the jump server host.""" - TUNNEL_METHOD: Final[SourceMysqlSchemasTunnelMethod] = dataclasses.field(default=SourceMysqlSchemasTunnelMethod.SSH_KEY_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }}) - r"""Connect through a jump server tunnel host using username and ssh key""" + r"""OS-level username for logging into the jump server host""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[SourceMysqlSchemasTunnelMethod] = dataclasses.field(default=SourceMysqlSchemasTunnelMethod.SSH_KEY_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) tunnel_port: Optional[int] = dataclasses.field(default=22, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port'), 'exclude': lambda f: f is None }}) r"""Port on the proxy/jump server that accepts inbound ssh connections.""" @@ -165,15 +169,15 @@ class SourceMysqlSSHKeyAuthentication: class SourceMysqlTunnelMethod(str, Enum): - r"""No ssh tunnel needed to connect to database""" NO_TUNNEL = 'NO_TUNNEL' @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceMysqlNoTunnel: - TUNNEL_METHOD: Final[SourceMysqlTunnelMethod] = dataclasses.field(default=SourceMysqlTunnelMethod.NO_TUNNEL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }}) r"""No ssh tunnel needed to connect to database""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[SourceMysqlTunnelMethod] = dataclasses.field(default=SourceMysqlTunnelMethod.NO_TUNNEL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) @@ -184,29 +188,33 @@ class SourceMysql: database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }}) r"""The database name.""" host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }}) - r"""The host name of the database.""" + r"""Hostname of the database.""" replication_method: SourceMysqlUpdateMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('replication_method') }}) r"""Configures how data is extracted from the database.""" username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) r"""The username which is used to access the database.""" + check_privileges: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('check_privileges'), 'exclude': lambda f: f is None }}) + r"""When this feature is enabled, during schema discovery the connector will query each table or view individually to check access privileges, and inaccessible tables, views, or columns therein will be removed. 
In large schemas, this might cause schema discovery to take too long, in which case it might be advisable to disable this feature.""" + checkpoint_target_interval_seconds: Optional[int] = dataclasses.field(default=300, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('checkpoint_target_interval_seconds'), 'exclude': lambda f: f is None }}) + r"""How often (in seconds) a stream should checkpoint, when possible.""" jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }}) - r"""Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters.""" + r"""Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).""" + max_db_connections: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('max_db_connections'), 'exclude': lambda f: f is None }}) + r"""Maximum number of concurrent queries to the database. Leave empty to let Airbyte optimize performance.""" password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) r"""The password associated with the username.""" port: Optional[int] = dataclasses.field(default=3306, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port'), 'exclude': lambda f: f is None }}) - r"""The port to connect to.""" + r"""Port of the database.""" SOURCE_TYPE: Final[SourceMysqlMysql] = dataclasses.field(default=SourceMysqlMysql.MYSQL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) - ssl: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl'), 'exclude': lambda f: f is None }}) - r"""Encrypt data using SSL.""" - ssl_mode: Optional[SourceMysqlSSLModes] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_mode'), 'exclude': lambda f: f is None }}) - r"""SSL connection modes. Read more in the docs.""" + ssl_mode: Optional[SourceMysqlEncryption] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_mode'), 'exclude': lambda f: f is None }}) + r"""The encryption method which is used when communicating with the database.""" tunnel_method: Optional[SourceMysqlSSHTunnelMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" -SourceMysqlUpdateMethod = Union[ReadChangesUsingBinaryLogCDC, SourceMysqlScanChangesWithUserDefinedCursor] +SourceMysqlUpdateMethod = Union[SourceMysqlScanChangesWithUserDefinedCursor, SourceMysqlReadChangesUsingChangeDataCaptureCDC] -SourceMysqlSSLModes = Union[Preferred, Required, SourceMysqlVerifyCA, VerifyIdentity] +SourceMysqlEncryption = Union[Preferred, Required, SourceMysqlVerifyCa, VerifyIdentity] SourceMysqlSSHTunnelMethod = Union[SourceMysqlNoTunnel, SourceMysqlSSHKeyAuthentication, SourceMysqlPasswordAuthentication]
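# ---------------------------------------------------------------------------
# Editorial usage sketch (not part of the generated diff): configuring the
# reshaped MySQL source model above. It assumes these classes are re-exported
# via `airbyte_api.models`, as the SDK does for its other models; host and
# credential values are placeholders. Note the breaking renames in this diff:
# ReadChangesUsingBinaryLogCDC is now SourceMysqlReadChangesUsingChangeDataCaptureCDC,
# the SourceMysqlSSLModes union is now SourceMysqlEncryption, and the
# top-level `ssl` boolean has been dropped in favor of `ssl_mode`.
from airbyte_api import models

mysql_config = models.SourceMysql(
    host='mysql.example.com',  # placeholder
    port=3306,
    database='mydb',
    username='airbyte',
    password='...',  # placeholder
    replication_method=models.SourceMysqlReadChangesUsingChangeDataCaptureCDC(
        initial_load_timeout_hours=8,
    ),
    ssl_mode=models.Preferred(),
    tunnel_method=models.SourceMysqlNoTunnel(),
)
# ---------------------------------------------------------------------------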
diff --git a/src/airbyte_api/models/source_navan.py b/src/airbyte_api/models/source_navan.py new file mode 100644 index 00000000..758f5e64 --- /dev/null +++ b/src/airbyte_api/models/source_navan.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Navan(str, Enum): + NAVAN = 'navan' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceNavan: + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Navan] = dataclasses.field(default=Navan.NAVAN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +
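# ---------------------------------------------------------------------------
# Editorial usage sketch (not part of the generated diff): the new SourceNavan
# model added above. Assumes re-export via `airbyte_api.models`; the OAuth
# client credentials are placeholders. `start_date` is a datetime and is
# serialized to an ISO timestamp by the generated encoder.
from datetime import datetime, timezone

from airbyte_api import models

navan_config = models.SourceNavan(
    client_id='...',      # placeholder
    client_secret='...',  # placeholder
    start_date=datetime(2024, 1, 1, tzinfo=timezone.utc),
)
# ---------------------------------------------------------------------------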
diff --git a/src/airbyte_api/models/source_nebius_ai.py b/src/airbyte_api/models/source_nebius_ai.py new file mode 100644 index 00000000..9796980d --- /dev/null +++ b/src/airbyte_api/models/source_nebius_ai.py @@ -0,0 +1,27 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class NebiusAi(str, Enum): + NEBIUS_AI = 'nebius-ai' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceNebiusAi: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""API key or access token""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + limit: Optional[str] = dataclasses.field(default='20', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('limit'), 'exclude': lambda f: f is None }}) + r"""Limit for each response object""" + SOURCE_TYPE: Final[NebiusAi] = dataclasses.field(default=NebiusAi.NEBIUS_AI, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_netsuite_enterprise.py b/src/airbyte_api/models/source_netsuite_enterprise.py new file mode 100644 index 00000000..e577c6b3 --- /dev/null +++ b/src/airbyte_api/models/source_netsuite_enterprise.py @@ -0,0 +1,180 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Any, Dict, Final, Optional, Union + + +class SourceNetsuiteEnterpriseSchemasAuthenticationMethodAuthenticationMethod(str, Enum): + OAUTH2_AUTHENTICATION = 'oauth2_authentication' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class OAuth2Authentication: + r"""Authenticate using OAuth2. This requires a consumer key, the private part of the certificate with which NetSuite OAuth2 Client Credentials was set up, and the certificate ID for the OAuth2 setup entry.""" + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + r"""The consumer key used for OAuth2 authentication. This is generated in NetSuite when creating an integration record.""" + key_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('key_id') }}) + r"""The certificate ID for the OAuth 2.0 Client Credentials Setup entry.""" + oauth2_private_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('oauth2_private_key') }}) + r"""The private portion of the certificate with which OAuth2 was set up. 
( created with openssl req -new -x509 -newkey rsa:4096 -keyout private.pem -sigopt rsa_padding_mode:pss -sha256 -sigopt rsa_pss_saltlen:64 -out public.pem -nodes -days 365 )""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + authentication_method: Optional[SourceNetsuiteEnterpriseSchemasAuthenticationMethodAuthenticationMethod] = dataclasses.field(default=SourceNetsuiteEnterpriseSchemasAuthenticationMethodAuthenticationMethod.OAUTH2_AUTHENTICATION, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('authentication_method'), 'exclude': lambda f: f is None }}) + + + + +class SourceNetsuiteEnterpriseSchemasAuthenticationMethod(str, Enum): + TOKEN_BASED_AUTHENTICATION = 'token_based_authentication' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class TokenBasedAuthentication: + r"""Authenticate using a token-based authentication method. This requires a consumer key and secret, as well as a token ID and secret.""" + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + r"""The consumer key used for token-based authentication. This is generated in NetSuite when creating an integration record.""" + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + r"""The consumer secret used for token-based authentication. This is generated in NetSuite when creating an integration record.""" + token_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token_id') }}) + r"""The token ID used for token-based authentication. This is generated in NetSuite when creating a token-based role.""" + token_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token_secret') }}) + r"""The token secret used for token-based authentication. 
This is generated in NetSuite when creating a token-based role. Ensure you keep this value secure.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + authentication_method: Optional[SourceNetsuiteEnterpriseSchemasAuthenticationMethod] = dataclasses.field(default=SourceNetsuiteEnterpriseSchemasAuthenticationMethod.TOKEN_BASED_AUTHENTICATION, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('authentication_method'), 'exclude': lambda f: f is None }}) + + + + +class SourceNetsuiteEnterpriseSchemasAuthenticationMethodAuthenticationMethodAuthenticationMethod(str, Enum): + PASSWORD_AUTHENTICATION = 'password_authentication' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceNetsuiteEnterprisePasswordAuthentication: + r"""Authenticate using a password.""" + password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }}) + r"""The password associated with the username.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + authentication_method: Optional[SourceNetsuiteEnterpriseSchemasAuthenticationMethodAuthenticationMethodAuthenticationMethod] = dataclasses.field(default=SourceNetsuiteEnterpriseSchemasAuthenticationMethodAuthenticationMethodAuthenticationMethod.PASSWORD_AUTHENTICATION, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('authentication_method'), 'exclude': lambda f: f is None }}) + + + + +class CursorMethod(str, Enum): + USER_DEFINED = 'user_defined' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceNetsuiteEnterpriseScanChangesWithUserDefinedCursor: + r"""Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. 
created_at, updated_at).""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + cursor_method: Optional[CursorMethod] = dataclasses.field(default=CursorMethod.USER_DEFINED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cursor_method'), 'exclude': lambda f: f is None }}) + + + + +class NetsuiteEnterprise(str, Enum): + NETSUITE_ENTERPRISE = 'netsuite-enterprise' + + +class SourceNetsuiteEnterpriseSchemasTunnelMethodTunnelMethod(str, Enum): + SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceNetsuiteEnterpriseSchemasPasswordAuthentication: + r"""Connect through a jump server tunnel host using username and password authentication""" + tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }}) + r"""Hostname of the jump server host that allows inbound ssh tunnel.""" + tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }}) + r"""OS-level username for logging into the jump server host""" + tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }}) + r"""OS-level password for logging into the jump server host""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[SourceNetsuiteEnterpriseSchemasTunnelMethodTunnelMethod] = dataclasses.field(default=SourceNetsuiteEnterpriseSchemasTunnelMethodTunnelMethod.SSH_PASSWORD_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) + tunnel_port: Optional[int] = dataclasses.field(default=22, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port'), 'exclude': lambda f: f is None }}) + r"""Port on the proxy/jump server that accepts inbound ssh connections.""" + + + + +class SourceNetsuiteEnterpriseSchemasTunnelMethod(str, Enum): + SSH_KEY_AUTH = 'SSH_KEY_AUTH' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceNetsuiteEnterpriseSSHKeyAuthentication: + r"""Connect through a jump server tunnel host using username and ssh key""" + ssh_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssh_key') }}) + r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )""" + tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }}) + r"""Hostname of the jump server host that allows inbound ssh tunnel.""" + tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }}) + r"""OS-level username for logging into the jump server host""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[SourceNetsuiteEnterpriseSchemasTunnelMethod] = dataclasses.field(default=SourceNetsuiteEnterpriseSchemasTunnelMethod.SSH_KEY_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) + tunnel_port: Optional[int] = 
dataclasses.field(default=22, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port'), 'exclude': lambda f: f is None }}) + r"""Port on the proxy/jump server that accepts inbound ssh connections.""" + + + + +class SourceNetsuiteEnterpriseTunnelMethod(str, Enum): + NO_TUNNEL = 'NO_TUNNEL' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceNetsuiteEnterpriseNoTunnel: + r"""No ssh tunnel needed to connect to database""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[SourceNetsuiteEnterpriseTunnelMethod] = dataclasses.field(default=SourceNetsuiteEnterpriseTunnelMethod.NO_TUNNEL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) + + + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceNetsuiteEnterprise: + account_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('account_id') }}) + r"""The NetSuite account ID which is used to access the database.""" + authentication_method: SourceNetsuiteEnterpriseAuthenticationMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('authentication_method') }}) + r"""Configure how to authenticate to NetSuite. Options include username/password or token-based authentication.""" + cursor: SourceNetsuiteEnterpriseUpdateMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cursor') }}) + r"""Configures how data is extracted from the database.""" + host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }}) + r"""Hostname of the database.""" + role_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('role_id') }}) + r"""The ID of the role which is used to access the database.""" + tunnel_method: SourceNetsuiteEnterpriseSSHTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }}) + r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + r"""The username which is used to access the database.""" + check_privileges: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('check_privileges'), 'exclude': lambda f: f is None }}) + r"""When this feature is enabled, during schema discovery the connector will query each table or view individually to check access privileges, and inaccessible tables, views, or columns therein will be removed. 
In large schemas, this might cause schema discovery to take too long, in which case it might be advisable to disable this feature.""" + checkpoint_target_interval_seconds: Optional[int] = dataclasses.field(default=300, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('checkpoint_target_interval_seconds'), 'exclude': lambda f: f is None }}) + r"""How often (in seconds) a stream should checkpoint, when possible.""" + concurrency: Optional[int] = dataclasses.field(default=1, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('concurrency'), 'exclude': lambda f: f is None }}) + r"""Maximum number of concurrent queries to the database.""" + jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }}) + r"""Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).""" + port: Optional[int] = dataclasses.field(default=1708, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port'), 'exclude': lambda f: f is None }}) + r"""Port of the database.""" + SOURCE_TYPE: Final[NetsuiteEnterprise] = dataclasses.field(default=NetsuiteEnterprise.NETSUITE_ENTERPRISE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + + +SourceNetsuiteEnterpriseAuthenticationMethod = Union[SourceNetsuiteEnterprisePasswordAuthentication, TokenBasedAuthentication, OAuth2Authentication] + +SourceNetsuiteEnterpriseUpdateMethod = Union[SourceNetsuiteEnterpriseScanChangesWithUserDefinedCursor] + +SourceNetsuiteEnterpriseSSHTunnelMethod = Union[SourceNetsuiteEnterpriseNoTunnel, SourceNetsuiteEnterpriseSSHKeyAuthentication, SourceNetsuiteEnterpriseSchemasPasswordAuthentication]
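# ---------------------------------------------------------------------------
# Editorial usage sketch (not part of the generated diff): the new
# SourceNetsuiteEnterprise model above, using token-based authentication.
# Assumes re-export via `airbyte_api.models`; all identifiers and secrets are
# placeholders. The other members of the authentication union are
# SourceNetsuiteEnterprisePasswordAuthentication and OAuth2Authentication.
from airbyte_api import models

netsuite_config = models.SourceNetsuiteEnterprise(
    account_id='1234567',  # placeholder NetSuite account ID
    authentication_method=models.TokenBasedAuthentication(
        client_id='...',      # consumer key (placeholder)
        client_secret='...',  # consumer secret (placeholder)
        token_id='...',       # placeholder
        token_secret='...',   # placeholder
    ),
    cursor=models.SourceNetsuiteEnterpriseScanChangesWithUserDefinedCursor(),
    host='1234567.connect.api.netsuite.com',  # placeholder
    role_id='3',  # placeholder
    tunnel_method=models.SourceNetsuiteEnterpriseNoTunnel(),
    username='airbyte',
)
# ---------------------------------------------------------------------------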
diff --git a/src/airbyte_api/models/source_newsdata.py b/src/airbyte_api/models/source_newsdata.py new file mode 100644 index 00000000..6017b8d1 --- /dev/null +++ b/src/airbyte_api/models/source_newsdata.py @@ -0,0 +1,172 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Any, Final, List, Optional + + +class SourceNewsdataCategory(str, Enum): + BUSINESS = 'business' + ENTERTAINMENT = 'entertainment' + ENVIRONMENT = 'environment' + FOOD = 'food' + HEALTH = 'health' + POLITICS = 'politics' + SCIENCE = 'science' + SPORTS = 'sports' + TECHNOLOGY = 'technology' + TOP = 'top' + WORLD = 'world' + + +class SourceNewsdataCountry(str, Enum): + AR = 'ar' + AU = 'au' + AT = 'at' + BD = 'bd' + BY = 'by' + BE = 'be' + BR = 'br' + BG = 'bg' + CA = 'ca' + CL = 'cl' + CN = 'cn' + CO = 'co' + CR = 'cr' + CU = 'cu' + CZ = 'cz' + DK = 'dk' + DO = 'do' + EC = 'ec' + EG = 'eg' + EE = 'ee' + ET = 'et' + FI = 'fi' + FR = 'fr' + DE = 'de' + GR = 'gr' + HK = 'hk' + HU = 'hu' + IN = 'in' + ID = 'id' + IQ = 'iq' + IE = 'ie' + IL = 'il' + IT = 'it' + JP = 'jp' + KZ = 'kz' + KW = 'kw' + LV = 'lv' + LB = 'lb' + LT = 'lt' + MY = 'my' + MX = 'mx' + MA = 'ma' + MM = 'mm' + NL = 'nl' + NZ = 'nz' + NG = 'ng' + KP = 'kp' + NO = 'no' + PK = 'pk' + PE = 'pe' + PH = 'ph' + PL = 'pl' + PT = 'pt' + PR = 'pr' + RO = 'ro' + RU = 'ru' + SA = 'sa' + RS = 'rs' + SG = 'sg' + SK = 'sk' + SI = 'si' + ZA = 'za' + KR = 'kr' + ES = 'es' + SE = 'se' + CH = 'ch' + TW = 'tw' + TZ = 'tz' + TH = 'th' + TR = 'tr' + UA = 'ua' + AE = 'ae' + GB = 'gb' + US = 'us' + VE = 've' + VI = 'vi' + + +class SourceNewsdataLanguage(str, Enum): + BE = 'be' + AM = 'am' + AR = 'ar' + BN = 'bn' + BS = 'bs' + BG = 'bg' + MY = 'my' + CKB = 'ckb' + ZH = 'zh' + HR = 'hr' + CS = 'cs' + DA = 'da' + NL = 'nl' + EN = 'en' + ET = 'et' + FI = 'fi' + FR = 'fr' + DE = 'de' + EL = 'el' + HE = 'he' + HI = 'hi' + HU = 'hu' + IN = 'in' + IT = 'it' + JP = 'jp' + KO = 'ko' + LV = 'lv' + LT = 'lt' + MS = 'ms' + NO = 'no' + PL = 'pl' + PT = 'pt' + RO = 'ro' + RU = 'ru' + SR = 'sr' + SK = 'sk' + SL = 'sl' + ES = 'es' + SW = 'sw' + SV = 'sv' + TH = 'th' + TR = 'tr' + UK = 'uk' + UR = 'ur' + VI = 'vi' + + +class Newsdata(str, Enum): + NEWSDATA = 'newsdata' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceNewsdata: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""API Key""" + one_of: Optional[Any] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('OneOf'), 'exclude': lambda f: f is None }}) + category: Optional[List[SourceNewsdataCategory]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('category'), 'exclude': lambda f: f is None }}) + r"""Categories (maximum 5) to restrict the search to.""" + country: Optional[List[SourceNewsdataCountry]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('country'), 'exclude': lambda f: f is None }}) + r"""2-letter ISO 3166-1 countries (maximum 5) to restrict the search to.""" + domain: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain'), 'exclude': lambda f: f is None }}) + r"""Domains (maximum 5) to restrict the search to. 
Use the sources stream to find top source IDs.""" + language: Optional[List[SourceNewsdataLanguage]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('language'), 'exclude': lambda f: f is None }}) + r"""Languages (maximum 5) to restrict the search to.""" + SOURCE_TYPE: Final[Newsdata] = dataclasses.field(default=Newsdata.NEWSDATA, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +
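# ---------------------------------------------------------------------------
# Editorial usage sketch (not part of the generated diff): the new
# SourceNewsdata model above. Assumes re-export via `airbyte_api.models`; the
# API key is a placeholder. Each filter list accepts at most five values, per
# the field docstrings.
from airbyte_api import models

newsdata_config = models.SourceNewsdata(
    api_key='...',  # placeholder
    category=[models.SourceNewsdataCategory.TECHNOLOGY],
    country=[models.SourceNewsdataCountry.US, models.SourceNewsdataCountry.GB],
    language=[models.SourceNewsdataLanguage.EN],
)
# ---------------------------------------------------------------------------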
diff --git a/src/airbyte_api/models/source_nexiopay.py b/src/airbyte_api/models/source_nexiopay.py new file mode 100644 index 00000000..267e1fa5 --- /dev/null +++ b/src/airbyte_api/models/source_nexiopay.py @@ -0,0 +1,35 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Nexiopay(str, Enum): + NEXIOPAY = 'nexiopay' + + +class Subdomain(str, Enum): + r"""The subdomain for the Nexio API environment, such as 'nexiopaysandbox' or 'nexiopay'.""" + NEXIOPAYSANDBOX = 'nexiopaysandbox' + NEXIOPAY = 'nexiopay' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceNexiopay: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Your Nexio API key (password). You can find it in the Nexio Dashboard under Settings > User Management. Select the API user and copy the API key.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + r"""Your Nexio API username. You can find it in the Nexio Dashboard under Settings > User Management. Select the API user and copy the username.""" + SOURCE_TYPE: Final[Nexiopay] = dataclasses.field(default=Nexiopay.NEXIOPAY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + subdomain: Optional[Subdomain] = dataclasses.field(default=Subdomain.NEXIOPAY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('subdomain'), 'exclude': lambda f: f is None }}) + r"""The subdomain for the Nexio API environment, such as 'nexiopaysandbox' or 'nexiopay'.""" + + diff --git a/src/airbyte_api/models/source_ninjaone_rmm.py b/src/airbyte_api/models/source_ninjaone_rmm.py new file mode 100644 index 00000000..2522e195 --- /dev/null +++ b/src/airbyte_api/models/source_ninjaone_rmm.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class NinjaoneRmm(str, Enum): + NINJAONE_RMM = 'ninjaone-rmm' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceNinjaoneRmm: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""The token can be generated natively in the authorize section of the NinjaOne Swagger documentation `https://app.ninjarmm.com/apidocs/?links.active=authorization`""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[NinjaoneRmm] = dataclasses.field(default=NinjaoneRmm.NINJAONE_RMM, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_open_exchange_rates.py b/src/airbyte_api/models/source_open_exchange_rates.py new file mode 100644 index 00000000..f2a7e107 --- /dev/null +++ b/src/airbyte_api/models/source_open_exchange_rates.py @@ -0,0 +1,26 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional + + +class OpenExchangeRates(str, Enum): + OPEN_EXCHANGE_RATES = 'open-exchange-rates' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceOpenExchangeRates: + app_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('app_id') }}) + r"""App ID provided by Open Exchange Rates""" + start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }}) + r"""Start getting data from that date.""" + base: Optional[str] = dataclasses.field(default='USD', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('base'), 'exclude': lambda f: f is None }}) + r"""Change base currency (3-letter code, default is USD - only modifiable in paid plans)""" + SOURCE_TYPE: Final[OpenExchangeRates] = dataclasses.field(default=OpenExchangeRates.OPEN_EXCHANGE_RATES, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +
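# ---------------------------------------------------------------------------
# Editorial usage sketch (not part of the generated diff): the new
# SourceOpenExchangeRates model above. Assumes re-export via
# `airbyte_api.models`; the App ID is a placeholder. `base` defaults to USD
# and, per the field docstring, is only modifiable on paid plans.
from airbyte_api import models

oer_config = models.SourceOpenExchangeRates(
    app_id='...',  # placeholder
    start_date='2024-01-01',
    base='USD',
)
# ---------------------------------------------------------------------------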
diff --git a/src/airbyte_api/models/source_opuswatch.py b/src/airbyte_api/models/source_opuswatch.py new file mode 100644 index 00000000..5fc8e204 --- /dev/null +++ b/src/airbyte_api/models/source_opuswatch.py @@ -0,0 +1,22 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional + + +class Opuswatch(str, Enum): + OPUSWATCH = 'opuswatch' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceOpuswatch: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + SOURCE_TYPE: Final[Opuswatch] = dataclasses.field(default=Opuswatch.OPUSWATCH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + start_date: Optional[str] = dataclasses.field(default='20250101', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'exclude': lambda f: f is None }}) + + diff --git a/src/airbyte_api/models/source_oracle_enterprise.py b/src/airbyte_api/models/source_oracle_enterprise.py new file mode 100644 index 00000000..61c48019 --- /dev/null +++ b/src/airbyte_api/models/source_oracle_enterprise.py @@ -0,0 +1,242 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Any, Dict, Final, List, Optional, Union + + +class SourceOracleEnterpriseSchemasConnectionType(str, Enum): + SID = 'sid' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceOracleEnterpriseSystemIDSID: + r"""Use Oracle System Identifier.""" + sid: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sid') }}) + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + connection_type: Optional[SourceOracleEnterpriseSchemasConnectionType] = dataclasses.field(default=SourceOracleEnterpriseSchemasConnectionType.SID, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('connection_type'), 'exclude': lambda f: f is None }}) + + + + +class SourceOracleEnterpriseConnectionType(str, Enum): + SERVICE_NAME = 'service_name' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceOracleEnterpriseServiceName: + r"""Use service name.""" + service_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('service_name') }}) + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + connection_type: Optional[SourceOracleEnterpriseConnectionType] = dataclasses.field(default=SourceOracleEnterpriseConnectionType.SERVICE_NAME, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('connection_type'), 'exclude': lambda f: f is None }}) + + + + +class SourceOracleEnterpriseSchemasCursorMethod(str, Enum): + CDC = 'cdc' + + +class SourceOracleEnterpriseInvalidCDCPositionBehaviorAdvanced(str, Enum): + r"""Determines whether Airbyte should fail or re-sync data in case of a stale/invalid cursor value in the mined logs. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. 
If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.""" + FAIL_SYNC = 'Fail sync' + RE_SYNC_DATA = 'Re-sync data' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceOracleEnterpriseReadChangesUsingChangeDataCaptureCDC: + r"""Recommended - Incrementally reads new inserts, updates, and deletes using Oracle's change data capture feature. This must be enabled on your database.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + cursor_method: Optional[SourceOracleEnterpriseSchemasCursorMethod] = dataclasses.field(default=SourceOracleEnterpriseSchemasCursorMethod.CDC, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cursor_method'), 'exclude': lambda f: f is None }}) + debezium_shutdown_timeout_seconds: Optional[int] = dataclasses.field(default=60, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('debezium_shutdown_timeout_seconds'), 'exclude': lambda f: f is None }}) + r"""The amount of time to allow the Debezium Engine to shut down, in seconds.""" + initial_load_timeout_hours: Optional[int] = dataclasses.field(default=8, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('initial_load_timeout_hours'), 'exclude': lambda f: f is None }}) + r"""The amount of time an initial load is allowed to continue for before catching up on CDC events.""" + invalid_cdc_cursor_position_behavior: Optional[SourceOracleEnterpriseInvalidCDCPositionBehaviorAdvanced] = dataclasses.field(default=SourceOracleEnterpriseInvalidCDCPositionBehaviorAdvanced.FAIL_SYNC, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('invalid_cdc_cursor_position_behavior'), 'exclude': lambda f: f is None }}) + r"""Determines whether Airbyte should fail or re-sync data in case of a stale/invalid cursor value in the mined logs. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.""" + + + + +class SourceOracleEnterpriseCursorMethod(str, Enum): + USER_DEFINED = 'user_defined' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceOracleEnterpriseScanChangesWithUserDefinedCursor: + r"""Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. 
created_at, updated_at).""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + cursor_method: Optional[SourceOracleEnterpriseCursorMethod] = dataclasses.field(default=SourceOracleEnterpriseCursorMethod.USER_DEFINED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cursor_method'), 'exclude': lambda f: f is None }}) + + + + +class SourceOracleEnterpriseSchemasEncryptionEncryptionMethod(str, Enum): + ENCRYPTED_VERIFY_CERTIFICATE = 'encrypted_verify_certificate' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceOracleEnterpriseTLSEncryptedVerifyCertificate: + r"""Verify and use the certificate provided by the server.""" + ssl_certificate: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_certificate') }}) + r"""Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + encryption_method: Optional[SourceOracleEnterpriseSchemasEncryptionEncryptionMethod] = dataclasses.field(default=SourceOracleEnterpriseSchemasEncryptionEncryptionMethod.ENCRYPTED_VERIFY_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method'), 'exclude': lambda f: f is None }}) + + + + +class SourceOracleEnterpriseEncryptionAlgorithm(str, Enum): + r"""This parameter defines what encryption algorithm is used.""" + AES256 = 'AES256' + AES192 = 'AES192' + AES128 = 'AES128' + THREE_DES168 = '3DES168' + THREE_DES112 = '3DES112' + DES = 'DES' + + +class SourceOracleEnterpriseSchemasEncryptionMethod(str, Enum): + CLIENT_NNE = 'client_nne' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceOracleEnterpriseNativeNetworkEncryptionNNE: + r"""The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + encryption_algorithm: Optional[SourceOracleEnterpriseEncryptionAlgorithm] = dataclasses.field(default=SourceOracleEnterpriseEncryptionAlgorithm.AES256, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_algorithm'), 'exclude': lambda f: f is None }}) + r"""This parameter defines what encryption algorithm is used.""" + encryption_method: Optional[SourceOracleEnterpriseSchemasEncryptionMethod] = dataclasses.field(default=SourceOracleEnterpriseSchemasEncryptionMethod.CLIENT_NNE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method'), 'exclude': lambda f: f is None }}) + + + + +class SourceOracleEnterpriseEncryptionMethod(str, Enum): + UNENCRYPTED = 'unencrypted' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceOracleEnterpriseUnencrypted: + r"""Data transfer will not be encrypted.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + encryption_method: Optional[SourceOracleEnterpriseEncryptionMethod] = 
dataclasses.field(default=SourceOracleEnterpriseEncryptionMethod.UNENCRYPTED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method'), 'exclude': lambda f: f is None }}) + + + + +class OracleEnterprise(str, Enum): + ORACLE_ENTERPRISE = 'oracle-enterprise' + + +class SourceOracleEnterpriseSchemasTunnelMethodTunnelMethod(str, Enum): + SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceOracleEnterprisePasswordAuthentication: + r"""Connect through a jump server tunnel host using username and password authentication""" + tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }}) + r"""Hostname of the jump server host that allows inbound ssh tunnel.""" + tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }}) + r"""OS-level username for logging into the jump server host""" + tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }}) + r"""OS-level password for logging into the jump server host""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[SourceOracleEnterpriseSchemasTunnelMethodTunnelMethod] = dataclasses.field(default=SourceOracleEnterpriseSchemasTunnelMethodTunnelMethod.SSH_PASSWORD_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) + tunnel_port: Optional[int] = dataclasses.field(default=22, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port'), 'exclude': lambda f: f is None }}) + r"""Port on the proxy/jump server that accepts inbound ssh connections.""" + + + + +class SourceOracleEnterpriseSchemasTunnelMethod(str, Enum): + SSH_KEY_AUTH = 'SSH_KEY_AUTH' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceOracleEnterpriseSSHKeyAuthentication: + r"""Connect through a jump server tunnel host using username and ssh key""" + ssh_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssh_key') }}) + r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )""" + tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }}) + r"""Hostname of the jump server host that allows inbound ssh tunnel.""" + tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }}) + r"""OS-level username for logging into the jump server host""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[SourceOracleEnterpriseSchemasTunnelMethod] = dataclasses.field(default=SourceOracleEnterpriseSchemasTunnelMethod.SSH_KEY_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) + tunnel_port: Optional[int] = dataclasses.field(default=22, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port'), 'exclude': lambda f: f is None }}) + r"""Port on the proxy/jump server that accepts inbound ssh 
connections.""" + + + + +class SourceOracleEnterpriseTunnelMethod(str, Enum): + NO_TUNNEL = 'NO_TUNNEL' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceOracleEnterpriseNoTunnel: + r"""No ssh tunnel needed to connect to database""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[SourceOracleEnterpriseTunnelMethod] = dataclasses.field(default=SourceOracleEnterpriseTunnelMethod.NO_TUNNEL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) + + + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceOracleEnterprise: + connection_data: SourceOracleEnterpriseConnectBy = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('connection_data') }}) + r"""The scheme by which to establish a database connection.""" + cursor: SourceOracleEnterpriseUpdateMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cursor') }}) + r"""Configures how data is extracted from the database.""" + encryption: SourceOracleEnterpriseEncryption = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption') }}) + r"""The encryption method which is used when communicating with the database.""" + host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }}) + r"""Hostname of the database.""" + tunnel_method: SourceOracleEnterpriseSSHTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }}) + r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + r"""The username which is used to access the database.""" + check_privileges: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('check_privileges'), 'exclude': lambda f: f is None }}) + r"""When this feature is enabled, during schema discovery the connector will query each table or view individually to check access privileges, and inaccessible tables, views, or columns therein will be removed. In large schemas, this might cause schema discovery to take too long, in which case it might be advisable to disable this feature.""" + checkpoint_target_interval_seconds: Optional[int] = dataclasses.field(default=300, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('checkpoint_target_interval_seconds'), 'exclude': lambda f: f is None }}) + r"""How often (in seconds) a stream should checkpoint, when possible.""" + concurrency: Optional[int] = dataclasses.field(default=1, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('concurrency'), 'exclude': lambda f: f is None }}) + r"""Maximum number of concurrent queries to the database.""" + jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }}) + r"""Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3).""" + password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) + r"""The password associated with the username.""" + port: Optional[int] = dataclasses.field(default=1521, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port'), 'exclude': lambda f: f is None }}) + r"""Port of the database. + Oracle Corporation recommends the following port numbers: + 1521 - Default listening port for client connections to the listener. + 2484 - Recommended and officially registered listening port for client connections to the listener using TCP/IP with SSL. + """ + schemas: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schemas'), 'exclude': lambda f: f is None }}) + r"""The list of schemas to sync from. Defaults to user. Case sensitive.""" + SOURCE_TYPE: Final[OracleEnterprise] = dataclasses.field(default=OracleEnterprise.ORACLE_ENTERPRISE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + + +SourceOracleEnterpriseConnectBy = Union[SourceOracleEnterpriseServiceName, SourceOracleEnterpriseSystemIDSID] + +SourceOracleEnterpriseUpdateMethod = Union[SourceOracleEnterpriseScanChangesWithUserDefinedCursor, SourceOracleEnterpriseReadChangesUsingChangeDataCaptureCDC] + +SourceOracleEnterpriseEncryption = Union[SourceOracleEnterpriseUnencrypted, SourceOracleEnterpriseNativeNetworkEncryptionNNE, SourceOracleEnterpriseTLSEncryptedVerifyCertificate] + +SourceOracleEnterpriseSSHTunnelMethod = Union[SourceOracleEnterpriseNoTunnel, SourceOracleEnterpriseSSHKeyAuthentication, SourceOracleEnterprisePasswordAuthentication]
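Note (reviewer sketch, not part of the generated diff): the new Oracle Enterprise source is configured by composing the union members above. A minimal construction sketch follows, assuming the generated classes are re-exported from `airbyte_api.models` as elsewhere in this SDK, and assuming `SourceOracleEnterpriseServiceName` (defined earlier in this file, outside this hunk) accepts a `service_name` field:

```python
# Hedged usage sketch for the generated models above. The `service_name`
# field of SourceOracleEnterpriseServiceName is an assumption; its definition
# falls outside this hunk.
from airbyte_api import models

source = models.SourceOracleEnterprise(
    connection_data=models.SourceOracleEnterpriseServiceName(service_name='ORCL'),  # assumed field
    cursor=models.SourceOracleEnterpriseScanChangesWithUserDefinedCursor(),
    encryption=models.SourceOracleEnterpriseUnencrypted(),
    host='oracle.example.com',  # example hostname
    tunnel_method=models.SourceOracleEnterpriseNoTunnel(),
    username='AIRBYTE_USER',
    password='secret',  # optional on the model
)
```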
diff --git a/src/airbyte_api/models/source_orbit.py b/src/airbyte_api/models/source_orbit.py deleted file mode 100644 index 6990201c..00000000 --- a/src/airbyte_api/models/source_orbit.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from airbyte_api import utils -from dataclasses_json import Undefined, dataclass_json -from enum import Enum -from typing import Final, Optional - - -class Orbit(str, Enum): - ORBIT = 'orbit' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class SourceOrbit: - api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }}) - r"""Authorizes you to work with Orbit workspaces associated with the token.""" - workspace: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workspace') }}) - r"""The unique name of the workspace that your API token is associated with.""" - SOURCE_TYPE: Final[Orbit] = dataclasses.field(default=Orbit.ORBIT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) - start_date: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'exclude': lambda f: f is None }}) - r"""Date in the format 2022-06-26. Only load members whose last activities are after this date.""" - - diff --git a/src/airbyte_api/models/source_outbrain_amplify.py b/src/airbyte_api/models/source_outbrain_amplify.py index 9a4bc876..bab2315e 100644 --- a/src/airbyte_api/models/source_outbrain_amplify.py +++ b/src/airbyte_api/models/source_outbrain_amplify.py @@ -8,6 +8,12 @@ from typing import Final, Optional, Union +class DefinitionOfConversionCountInReports(str, Enum): + r"""The definition of conversion count in reports. See the docs.""" + CLICK_VIEW_TIME = 'click/view_time' + CONVERSION_TIME = 'conversion_time' + + class BothUsernameAndPasswordIsRequiredForAuthenticationRequest(str, Enum): USERNAME_PASSWORD = 'username_password' @@ -63,6 +69,8 @@ class SourceOutbrainAmplify: r"""Credentials for making authenticated requests requires either username/password or access_token.""" start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }}) r"""Date in the format YYYY-MM-DD eg. 2017-01-25. Any data before this date will not be replicated.""" + conversion_count: Optional[DefinitionOfConversionCountInReports] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('conversion_count'), 'exclude': lambda f: f is None }}) + r"""The definition of conversion count in reports. See the docs.""" end_date: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'exclude': lambda f: f is None }}) r"""Date in the format YYYY-MM-DD.""" geo_location_breakdown: Optional[GranularityForGeoLocationRegion] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('geo_location_breakdown'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_outreach.py b/src/airbyte_api/models/source_outreach.py index d5100893..9163c310 100644 --- a/src/airbyte_api/models/source_outreach.py +++ b/src/airbyte_api/models/source_outreach.py @@ -2,8 +2,10 @@ from __future__ import annotations import dataclasses +import dateutil.parser from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json +from datetime import datetime from enum import Enum from typing import Final @@ -23,8 +25,8 @@ class SourceOutreach: r"""A Redirect URI is the location where the authorization server sends the user once the app has been successfully authorized and granted an authorization code or access token.""" refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }}) r"""The token for obtaining the new access token.""" - start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }}) - r"""The date from which you'd like to replicate data for Outreach API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + r"""The date from which you'd like to replicate data for Outreach API, in the format YYYY-MM-DDT00:00:00.000Z. 
All data generated after this date will be replicated.""" SOURCE_TYPE: Final[Outreach] = dataclasses.field(default=Outreach.OUTREACH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_paddle.py b/src/airbyte_api/models/source_paddle.py new file mode 100644 index 00000000..dbfc30bd --- /dev/null +++ b/src/airbyte_api/models/source_paddle.py @@ -0,0 +1,33 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class SourcePaddleEnvironment(str, Enum): + r"""The environment for the Paddle API, either 'sandbox' or 'live'.""" + API = 'api' + SANDBOX_API = 'sandbox-api' + + +class Paddle(str, Enum): + PADDLE = 'paddle' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourcePaddle: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Your Paddle API key. You can generate it by navigating to Paddle > Developer tools > Authentication > Generate API key. Treat this key like a password and keep it secure.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + environment: Optional[SourcePaddleEnvironment] = dataclasses.field(default=SourcePaddleEnvironment.API, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('environment'), 'exclude': lambda f: f is None }}) + r"""The environment for the Paddle API, either 'sandbox' or 'live'.""" + SOURCE_TYPE: Final[Paddle] = dataclasses.field(default=Paddle.PADDLE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_pagerduty.py b/src/airbyte_api/models/source_pagerduty.py new file mode 100644 index 00000000..637a3523 --- /dev/null +++ b/src/airbyte_api/models/source_pagerduty.py @@ -0,0 +1,43 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, List, Optional + + +class ServiceDetails(str, Enum): + ESCALATION_POLICIES = 'escalation_policies' + TEAMS = 'teams' + INTEGRATIONS = 'integrations' + AUTO_PAUSE_NOTIFICATIONS_PARAMETERS = 'auto_pause_notifications_parameters' + + +class Pagerduty(str, Enum): + PAGERDUTY = 'pagerduty' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourcePagerduty: + token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token') }}) + r"""API key for PagerDuty API authentication""" + cutoff_days: Optional[int] = dataclasses.field(default=90, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cutoff_days'), 'exclude': lambda f: f is None }}) + r"""Fetch pipelines updated in the last number of days""" + default_severity: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('default_severity'), 'exclude': lambda f: f is None }}) + r"""A default severity category if not present""" + exclude_services: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('exclude_services'), 'exclude': lambda f: f is None }}) + r"""List of PagerDuty service names to ignore incidents from. If not set, all incidents will be pulled.""" + incident_log_entries_overview: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('incident_log_entries_overview'), 'exclude': lambda f: f is None }}) + r"""If true, will return a subset of log entries that show only the most important changes to the incident.""" + max_retries: Optional[int] = dataclasses.field(default=5, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('max_retries'), 'exclude': lambda f: f is None }}) + r"""Maximum number of PagerDuty API request retries to perform upon connection errors. The source will pause for an exponentially increasing number of seconds before retrying.""" + page_size: Optional[int] = dataclasses.field(default=25, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('page_size'), 'exclude': lambda f: f is None }}) + r"""page size to use when querying PagerDuty API""" + service_details: Optional[List[ServiceDetails]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('service_details'), 'exclude': lambda f: f is None }}) + r"""List of PagerDuty service additional details to include.""" + SOURCE_TYPE: Final[Pagerduty] = dataclasses.field(default=Pagerduty.PAGERDUTY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_pardot.py b/src/airbyte_api/models/source_pardot.py index 27e6bdf6..a45dbde1 100644 --- a/src/airbyte_api/models/source_pardot.py +++ b/src/airbyte_api/models/source_pardot.py @@ -2,8 +2,10 @@ from __future__ import annotations import dataclasses +import dateutil.parser from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json +from datetime import datetime from enum import Enum from typing import Final, Optional @@ -25,8 +27,10 @@ class SourcePardot: r"""Salesforce Refresh Token used for Airbyte to access your Salesforce account. 
If you don't know what this is, follow this guide to retrieve it.""" is_sandbox: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('is_sandbox'), 'exclude': lambda f: f is None }}) r"""Whether or not the the app is in a Salesforce sandbox. If you do not know what this, assume it is false.""" + page_size: Optional[str] = dataclasses.field(default='1000', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('page_size'), 'exclude': lambda f: f is None }}) + r"""The maximum number of records to return per request""" SOURCE_TYPE: Final[Pardot] = dataclasses.field(default=Pardot.PARDOT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) - start_date: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'exclude': lambda f: f is None }}) - r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. Leave blank to skip this filter""" + start_date: Optional[datetime] = dataclasses.field(default=dateutil.parser.isoparse('2007-01-01T00:00:00Z'), metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) + r"""UTC date and time in the format 2000-01-01T00:00:00Z. Any data before this date will not be replicated. Defaults to the year Pardot was released.""" diff --git a/src/airbyte_api/models/source_partnerize.py b/src/airbyte_api/models/source_partnerize.py new file mode 100644 index 00000000..5d2fa0ab --- /dev/null +++ b/src/airbyte_api/models/source_partnerize.py @@ -0,0 +1,24 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Partnerize(str, Enum): + PARTNERIZE = 'partnerize' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourcePartnerize: + application_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('application_key') }}) + r"""The application key identifies the network you are making the request against. Find it in your account settings under 'User Application Key' at https://console.partnerize.com.""" + user_api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('user_api_key') }}) + r"""The user API key identifies the user on whose behalf the request is made. Find it in your account settings under 'User API Key' at https://console.partnerize.com.""" + SOURCE_TYPE: Final[Partnerize] = dataclasses.field(default=Partnerize.PARTNERIZE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_partnerstack.py b/src/airbyte_api/models/source_partnerstack.py new file mode 100644 index 00000000..a3bcd782 --- /dev/null +++ b/src/airbyte_api/models/source_partnerstack.py @@ -0,0 +1,26 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional + + +class Partnerstack(str, Enum): + PARTNERSTACK = 'partnerstack' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourcePartnerstack: + private_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('private_key') }}) + r"""The Live Private Key for a Partnerstack account.""" + public_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('public_key') }}) + r"""The Live Public Key for a Partnerstack account.""" + SOURCE_TYPE: Final[Partnerstack] = dataclasses.field(default=Partnerstack.PARTNERSTACK, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + start_date: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'exclude': lambda f: f is None }}) + r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.""" + + diff --git a/src/airbyte_api/models/source_payfit.py b/src/airbyte_api/models/source_payfit.py new file mode 100644 index 00000000..12ee0242 --- /dev/null +++ b/src/airbyte_api/models/source_payfit.py @@ -0,0 +1,22 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Payfit(str, Enum): + PAYFIT = 'payfit' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourcePayfit: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + company_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('company_id') }}) + SOURCE_TYPE: Final[Payfit] = dataclasses.field(default=Payfit.PAYFIT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_perigon.py b/src/airbyte_api/models/source_perigon.py new file mode 100644 index 00000000..47356811 --- /dev/null +++ b/src/airbyte_api/models/source_perigon.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Perigon(str, Enum): + PERIGON = 'perigon' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourcePerigon: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Your API key for authenticating with the Perigon API. Obtain it by creating an account at https://www.perigon.io/sign-up and verifying your email. 
The API key will be visible on your account dashboard.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Perigon] = dataclasses.field(default=Perigon.PERIGON, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_phyllo.py b/src/airbyte_api/models/source_phyllo.py new file mode 100644 index 00000000..97c67444 --- /dev/null +++ b/src/airbyte_api/models/source_phyllo.py @@ -0,0 +1,36 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class SourcePhylloEnvironment(str, Enum): + r"""The environment for the API (e.g., 'api.sandbox', 'api.staging', 'api')""" + API_SANDBOX = 'api.sandbox' + API_STAGING = 'api.staging' + API = 'api' + + +class Phyllo(str, Enum): + PHYLLO = 'phyllo' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourcePhyllo: + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + r"""Your Client ID for the Phyllo API. You can find this in the Phyllo Developer Dashboard under API credentials.""" + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + r"""Your Client Secret for the Phyllo API. You can find this in the Phyllo Developer Dashboard under API credentials.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + environment: Optional[SourcePhylloEnvironment] = dataclasses.field(default=SourcePhylloEnvironment.API, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('environment'), 'exclude': lambda f: f is None }}) + r"""The environment for the API (e.g., 'api.sandbox', 'api.staging', 'api')""" + SOURCE_TYPE: Final[Phyllo] = dataclasses.field(default=Phyllo.PHYLLO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_pingdom.py b/src/airbyte_api/models/source_pingdom.py new file mode 100644 index 00000000..5ba14064 --- /dev/null +++ b/src/airbyte_api/models/source_pingdom.py @@ -0,0 +1,32 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Resolution(str, Enum): + HOUR = 'hour' + DAY = 'day' + WEEK = 'week' + + +class Pingdom(str, Enum): + PINGDOM = 'pingdom' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourcePingdom: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + probes: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('probes'), 'exclude': lambda f: f is None }}) + resolution: Optional[Resolution] = dataclasses.field(default=Resolution.HOUR, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('resolution'), 'exclude': lambda f: f is None }}) + SOURCE_TYPE: Final[Pingdom] = dataclasses.field(default=Pingdom.PINGDOM, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_pinterest.py b/src/airbyte_api/models/source_pinterest.py index 23dd8237..51ccd4b3 100644 --- a/src/airbyte_api/models/source_pinterest.py +++ b/src/airbyte_api/models/source_pinterest.py @@ -252,6 +252,8 @@ class Status(str, Enum): @dataclasses.dataclass class SourcePinterest: UNSET='__SPEAKEASY_UNSET__' + account_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('account_id'), 'exclude': lambda f: f is None }}) + r"""The Pinterest account ID you want to fetch data for. This ID must be provided to filter the data for a specific account.""" credentials: Optional[OAuth20] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) custom_reports: Optional[List[ReportConfig]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('custom_reports'), 'exclude': lambda f: f is None }}) r"""A list which contains ad statistics entries, each entry must have a name and can contains fields, breakdowns or action_breakdowns. Click on \\"add\\" to fill this field.""" diff --git a/src/airbyte_api/models/source_pokeapi.py b/src/airbyte_api/models/source_pokeapi.py index 87b9c7d0..afdc0ede 100644 --- a/src/airbyte_api/models/source_pokeapi.py +++ b/src/airbyte_api/models/source_pokeapi.py @@ -38,10 +38,10 @@ class PokemonName(str, Enum): RAICHU = 'raichu' SANDSHREW = 'sandshrew' SANDSLASH = 'sandslash' - NIDORANF = 'nidoranf' + NIDORAN_F = 'nidoran-f' NIDORINA = 'nidorina' NIDOQUEEN = 'nidoqueen' - NIDORANM = 'nidoranm' + NIDORAN_M = 'nidoran-m' NIDORINO = 'nidorino' NIDOKING = 'nidoking' CLEFAIRY = 'clefairy' diff --git a/src/airbyte_api/models/source_poplar.py b/src/airbyte_api/models/source_poplar.py new file mode 100644 index 00000000..88dc6671 --- /dev/null +++ b/src/airbyte_api/models/source_poplar.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Poplar(str, Enum): + POPLAR = 'poplar' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourcePoplar: + access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) + r"""Your Poplar API Access Token. Generate it from the [API Credentials page](https://app.heypoplar.com/credentials) in your account. Use a production token for live data or a test token for testing purposes.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Poplar] = dataclasses.field(default=Poplar.POPLAR, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_printify.py b/src/airbyte_api/models/source_printify.py new file mode 100644 index 00000000..44062ebd --- /dev/null +++ b/src/airbyte_api/models/source_printify.py @@ -0,0 +1,22 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Printify(str, Enum): + PRINTIFY = 'printify' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourcePrintify: + api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }}) + r"""Your Printify API token. Obtain it from your Printify account settings.""" + SOURCE_TYPE: Final[Printify] = dataclasses.field(default=Printify.PRINTIFY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_quickbooks.py b/src/airbyte_api/models/source_quickbooks.py index 851b8c85..caf3ebc9 100644 --- a/src/airbyte_api/models/source_quickbooks.py +++ b/src/airbyte_api/models/source_quickbooks.py @@ -7,16 +7,20 @@ from dataclasses_json import Undefined, dataclass_json from datetime import datetime from enum import Enum -from typing import Final, Optional, Union +from typing import Final, Optional class SourceQuickbooksAuthType(str, Enum): OAUTH2_0 = 'oauth2.0' +class Quickbooks(str, Enum): + QUICKBOOKS = 'quickbooks' + + @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class SourceQuickbooksOAuth20: +class SourceQuickbooks: access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) r"""Access token for making authenticated requests.""" client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) @@ -27,27 +31,13 @@ class SourceQuickbooksOAuth20: r"""Labeled Company ID. 
The Make API Calls panel is populated with the realm id and the current access token.""" refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }}) r"""A token used when refreshing the access token.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + r"""The default value to use if no bookmark exists for an endpoint (rfc3339 date string). E.g., 2021-03-20T00:00:00Z. Any data before this date will not be replicated.""" token_expiry_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token_expiry_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) r"""The date-time when the access token should be refreshed.""" AUTH_TYPE: Final[Optional[SourceQuickbooksAuthType]] = dataclasses.field(default=SourceQuickbooksAuthType.OAUTH2_0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) - - - - -class Quickbooks(str, Enum): - QUICKBOOKS = 'quickbooks' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class SourceQuickbooks: - credentials: SourceQuickbooksAuthorizationMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }}) - start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) - r"""The default value to use if no bookmark exists for an endpoint (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this date will not be replicated.""" sandbox: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sandbox'), 'exclude': lambda f: f is None }}) r"""Determines whether to use the sandbox or production environment.""" SOURCE_TYPE: Final[Quickbooks] = dataclasses.field(default=Quickbooks.QUICKBOOKS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) - -SourceQuickbooksAuthorizationMethod = Union[SourceQuickbooksOAuth20]
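Note (reviewer sketch, not part of the generated diff): this change flattens the QuickBooks model. The `SourceQuickbooksOAuth20` wrapper and the single-member `SourceQuickbooksAuthorizationMethod` union are removed, and the OAuth fields now live directly on `SourceQuickbooks`. A minimal sketch of the new shape, assuming re-export from `airbyte_api.models` and that `client_secret` and `realm_id` remain required fields (their definitions fall in the elided hunk context):

```python
# Hedged sketch of the flattened model; client_secret and realm_id are
# assumed from the elided context of this hunk.
import dateutil.parser
from airbyte_api import models

source = models.SourceQuickbooks(
    access_token='...',
    client_id='...',
    client_secret='...',   # assumed field
    realm_id='...',        # assumed field
    refresh_token='...',
    start_date=dateutil.parser.isoparse('2021-03-20T00:00:00Z'),
    token_expiry_date=dateutil.parser.isoparse('2025-01-01T00:00:00Z'),
    sandbox=False,
)
```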
diff --git a/src/airbyte_api/models/source_recharge.py b/src/airbyte_api/models/source_recharge.py index 2bcb38a5..649a5289 100644 --- a/src/airbyte_api/models/source_recharge.py +++ b/src/airbyte_api/models/source_recharge.py @@ -21,6 +21,8 @@ class SourceRecharge: r"""The value of the Access Token generated. See the docs for more information.""" start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) r"""The date from which you'd like to replicate data for Recharge API, in the format YYYY-MM-DDT00:00:00Z. Any data before this date will not be replicated.""" + lookback_window_days: Optional[int] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_window_days'), 'exclude': lambda f: f is None }}) + r"""Specifies how many days of historical data should be reloaded each time the Recharge connector runs.""" SOURCE_TYPE: Final[Recharge] = dataclasses.field(default=Recharge.RECHARGE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) use_orders_deprecated_api: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('use_orders_deprecated_api'), 'exclude': lambda f: f is None }}) r"""Define whether or not the `Orders` stream should use the deprecated `2021-01` API version, or use `2021-11`, otherwise.""" diff --git a/src/airbyte_api/models/source_recurly.py b/src/airbyte_api/models/source_recurly.py index 75cb5790..ed9e8df8 100644 --- a/src/airbyte_api/models/source_recurly.py +++ b/src/airbyte_api/models/source_recurly.py @@ -17,10 +17,16 @@ class Recurly(str, Enum): class SourceRecurly: api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) r"""Recurly API Key. See the docs for more information on how to generate this key.""" + accounts_step_days: Optional[int] = dataclasses.field(default=30, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('accounts_step_days'), 'exclude': lambda f: f is None }}) + r"""Days in length for each API call to get data from the accounts stream. Smaller values will result in more API calls but better concurrency.""" begin_time: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('begin_time'), 'exclude': lambda f: f is None }}) r"""ISO8601 timestamp from which the replication from Recurly API will start from.""" end_time: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_time'), 'exclude': lambda f: f is None }}) r"""ISO8601 timestamp to which the replication from Recurly API will stop. Records after that date won't be imported.""" + is_sandbox: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('is_sandbox'), 'exclude': lambda f: f is None }}) + r"""Set to true for sandbox accounts (400 requests/min, all types). Defaults to false for production accounts (1,000 GET requests/min).""" + num_workers: Optional[int] = dataclasses.field(default=10, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('num_workers'), 'exclude': lambda f: f is None }}) + r"""The number of worker threads to use for the sync.""" SOURCE_TYPE: Final[Recurly] = dataclasses.field(default=Recurly.RECURLY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_retailexpress_by_maropost.py b/src/airbyte_api/models/source_retailexpress_by_maropost.py new file mode 100644 index 00000000..f13dc499 --- /dev/null +++ b/src/airbyte_api/models/source_retailexpress_by_maropost.py @@ -0,0 +1,24 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class RetailexpressByMaropost(str, Enum): + RETAILEXPRESS_BY_MAROPOST = 'retailexpress-by-maropost' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceRetailexpressByMaropost: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[RetailexpressByMaropost] = dataclasses.field(default=RetailexpressByMaropost.RETAILEXPRESS_BY_MAROPOST, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_ringcentral.py b/src/airbyte_api/models/source_ringcentral.py new file mode 100644 index 00000000..3b0ea10b --- /dev/null +++ b/src/airbyte_api/models/source_ringcentral.py @@ -0,0 +1,26 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Ringcentral(str, Enum): + RINGCENTRAL = 'ringcentral' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceRingcentral: + account_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('account_id') }}) + r"""Could be seen at response to basic api call to an endpoint with ~ operator. Example- (https://platform.devtest.ringcentral.com/restapi/v1.0/account/~/extension/~/business-hours)""" + auth_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_token') }}) + r"""Token could be recieved by following instructions at https://developers.ringcentral.com/api-reference/authentication""" + extension_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('extension_id') }}) + r"""Could be seen at response to basic api call to an endpoint with ~ operator. Example- (https://platform.devtest.ringcentral.com/restapi/v1.0/account/~/extension/~/business-hours)""" + SOURCE_TYPE: Final[Ringcentral] = dataclasses.field(default=Ringcentral.RINGCENTRAL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_rocket_chat.py b/src/airbyte_api/models/source_rocket_chat.py new file mode 100644 index 00000000..ecda6581 --- /dev/null +++ b/src/airbyte_api/models/source_rocket_chat.py @@ -0,0 +1,26 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class RocketChat(str, Enum): + ROCKET_CHAT = 'rocket-chat' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceRocketChat: + endpoint: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('endpoint') }}) + r"""Your rocket.chat instance URL.""" + token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('token') }}) + r"""Your API Token. See here. The token is case sensitive.""" + user_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('user_id') }}) + r"""Your User Id.""" + SOURCE_TYPE: Final[RocketChat] = dataclasses.field(default=RocketChat.ROCKET_CHAT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_s3.py b/src/airbyte_api/models/source_s3.py index 63c07d60..162526c9 100644 --- a/src/airbyte_api/models/source_s3.py +++ b/src/airbyte_api/models/source_s3.py @@ -10,28 +10,30 @@ from typing import Final, List, Optional, Union -class SourceS3DeliveryType(str, Enum): +class SourceS3SchemasDeliveryType(str, Enum): USE_FILE_TRANSFER = 'use_file_transfer' @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class CopyRawFiles: +class SourceS3CopyRawFiles: r"""Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.""" - DELIVERY_TYPE: Final[Optional[SourceS3DeliveryType]] = dataclasses.field(default=SourceS3DeliveryType.USE_FILE_TRANSFER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delivery_type'), 'exclude': lambda f: f is None }}) + DELIVERY_TYPE: Final[Optional[SourceS3SchemasDeliveryType]] = dataclasses.field(default=SourceS3SchemasDeliveryType.USE_FILE_TRANSFER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delivery_type'), 'exclude': lambda f: f is None }}) + preserve_directory_structure: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('preserve_directory_structure'), 'exclude': lambda f: f is None }}) + r"""If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled.""" -class DeliveryType(str, Enum): +class SourceS3DeliveryType(str, Enum): USE_RECORDS_TRANSFER = 'use_records_transfer' @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class ReplicateRecords: +class SourceS3ReplicateRecords: r"""Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. 
Data can be flattened, typed and deduped depending on the destination.""" - DELIVERY_TYPE: Final[Optional[DeliveryType]] = dataclasses.field(default=DeliveryType.USE_RECORDS_TRANSFER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delivery_type'), 'exclude': lambda f: f is None }}) + DELIVERY_TYPE: Final[Optional[SourceS3DeliveryType]] = dataclasses.field(default=SourceS3DeliveryType.USE_RECORDS_TRANSFER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delivery_type'), 'exclude': lambda f: f is None }}) @@ -252,7 +254,7 @@ class SourceS3: r"""In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.""" aws_secret_access_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_secret_access_key'), 'exclude': lambda f: f is None }}) r"""In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.""" - delivery_method: Optional[DeliveryMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delivery_method'), 'exclude': lambda f: f is None }}) + delivery_method: Optional[SourceS3DeliveryMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delivery_method'), 'exclude': lambda f: f is None }}) endpoint: Optional[str] = dataclasses.field(default='', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('endpoint'), 'exclude': lambda f: f is None }}) r"""Endpoint to an S3 compatible service. 
Leave empty to use AWS.""" region_name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region_name'), 'exclude': lambda f: f is None }}) @@ -265,7 +267,7 @@ class SourceS3: -DeliveryMethod = Union[ReplicateRecords, CopyRawFiles] +SourceS3DeliveryMethod = Union[SourceS3ReplicateRecords, SourceS3CopyRawFiles] SourceS3Processing = Union[SourceS3Local]
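Note (reviewer sketch, not part of the generated diff): the S3 delivery-method classes gain a `SourceS3` prefix (`ReplicateRecords` becomes `SourceS3ReplicateRecords`, `CopyRawFiles` becomes `SourceS3CopyRawFiles`), so downstream code referencing the old names needs updating. A minimal sketch of the renamed union in use, assuming the classes are re-exported from `airbyte_api.models`:

```python
# Hedged sketch: selecting a delivery method under the renamed classes.
from airbyte_api import models

# Classic structured-record replication (the recommended default):
records = models.SourceS3ReplicateRecords()

# Raw file copy, keeping the source's subdirectory layout via the new option:
raw = models.SourceS3CopyRawFiles(preserve_directory_structure=True)
```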
diff --git a/src/airbyte_api/models/source_salesforce.py b/src/airbyte_api/models/source_salesforce.py index 87b50714..5822d6fb 100644 --- a/src/airbyte_api/models/source_salesforce.py +++ b/src/airbyte_api/models/source_salesforce.py @@ -10,7 +10,7 @@ from typing import Final, List, Optional -class AuthType(str, Enum): +class SourceSalesforceAuthType(str, Enum): CLIENT = 'Client' @@ -47,7 +47,7 @@ class SourceSalesforce: r"""Enter your Salesforce developer application's Client secret""" refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }}) r"""Enter your application's Salesforce Refresh Token used for Airbyte to access your Salesforce account.""" - AUTH_TYPE: Final[Optional[AuthType]] = dataclasses.field(default=AuthType.CLIENT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) + AUTH_TYPE: Final[Optional[SourceSalesforceAuthType]] = dataclasses.field(default=SourceSalesforceAuthType.CLIENT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) force_use_bulk_api: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('force_use_bulk_api'), 'exclude': lambda f: f is None }}) r"""Toggle to use Bulk API (this might cause empty fields for some streams)""" is_sandbox: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('is_sandbox'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_sap_hana_enterprise.py b/src/airbyte_api/models/source_sap_hana_enterprise.py new file mode 100644 index 00000000..1277f63b --- /dev/null +++ b/src/airbyte_api/models/source_sap_hana_enterprise.py @@ -0,0 +1,202 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Any, Dict, Final, List, Optional, Union + + +class SourceSapHanaEnterpriseSchemasCursorMethod(str, Enum): + CDC = 'cdc' + + +class SourceSapHanaEnterpriseInvalidCDCPositionBehaviorAdvanced(str, Enum): + r"""Determines whether Airbyte should fail or re-sync data in case of a stale/invalid cursor value in the mined logs. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.""" + FAIL_SYNC = 'Fail sync' + RE_SYNC_DATA = 'Re-sync data' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSapHanaEnterpriseReadChangesUsingChangeDataCaptureCDC: + r"""Recommended - Incrementally reads new inserts, updates, and deletes using change data capture feature. This must be enabled on your database.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + cursor_method: Optional[SourceSapHanaEnterpriseSchemasCursorMethod] = dataclasses.field(default=SourceSapHanaEnterpriseSchemasCursorMethod.CDC, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cursor_method'), 'exclude': lambda f: f is None }}) + initial_load_timeout_hours: Optional[int] = dataclasses.field(default=8, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('initial_load_timeout_hours'), 'exclude': lambda f: f is None }}) + r"""The amount of time an initial load is allowed to continue before catching up on CDC events.""" + invalid_cdc_cursor_position_behavior: Optional[SourceSapHanaEnterpriseInvalidCDCPositionBehaviorAdvanced] = dataclasses.field(default=SourceSapHanaEnterpriseInvalidCDCPositionBehaviorAdvanced.FAIL_SYNC, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('invalid_cdc_cursor_position_behavior'), 'exclude': lambda f: f is None }}) + r"""Determines whether Airbyte should fail or re-sync data in case of a stale/invalid cursor value in the mined logs. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss.""" + + + + +class SourceSapHanaEnterpriseCursorMethod(str, Enum): + USER_DEFINED = 'user_defined' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSapHanaEnterpriseScanChangesWithUserDefinedCursor: + r"""Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. 
created_at, updated_at).""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + cursor_method: Optional[SourceSapHanaEnterpriseCursorMethod] = dataclasses.field(default=SourceSapHanaEnterpriseCursorMethod.USER_DEFINED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cursor_method'), 'exclude': lambda f: f is None }}) + + + + +class SourceSapHanaEnterpriseSchemasEncryptionEncryptionMethod(str, Enum): + ENCRYPTED_VERIFY_CERTIFICATE = 'encrypted_verify_certificate' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSapHanaEnterpriseTLSEncryptedVerifyCertificate: + r"""Verify and use the certificate provided by the server.""" + ssl_certificate: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_certificate') }}) + r"""Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + encryption_method: Optional[SourceSapHanaEnterpriseSchemasEncryptionEncryptionMethod] = dataclasses.field(default=SourceSapHanaEnterpriseSchemasEncryptionEncryptionMethod.ENCRYPTED_VERIFY_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method'), 'exclude': lambda f: f is None }}) + + + + +class SourceSapHanaEnterpriseEncryptionAlgorithm(str, Enum): + r"""This parameter defines what encryption algorithm is used.""" + AES256 = 'AES256' + RC4_56 = 'RC4_56' + THREE_DES168 = '3DES168' + + +class SourceSapHanaEnterpriseSchemasEncryptionMethod(str, Enum): + CLIENT_NNE = 'client_nne' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSapHanaEnterpriseNativeNetworkEncryptionNNE: + r"""The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + encryption_algorithm: Optional[SourceSapHanaEnterpriseEncryptionAlgorithm] = dataclasses.field(default=SourceSapHanaEnterpriseEncryptionAlgorithm.AES256, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_algorithm'), 'exclude': lambda f: f is None }}) + r"""This parameter defines what encryption algorithm is used.""" + encryption_method: Optional[SourceSapHanaEnterpriseSchemasEncryptionMethod] = dataclasses.field(default=SourceSapHanaEnterpriseSchemasEncryptionMethod.CLIENT_NNE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method'), 'exclude': lambda f: f is None }}) + + + + +class SourceSapHanaEnterpriseEncryptionMethod(str, Enum): + UNENCRYPTED = 'unencrypted' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSapHanaEnterpriseUnencrypted: + r"""Data transfer will not be encrypted.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + encryption_method: Optional[SourceSapHanaEnterpriseEncryptionMethod] = dataclasses.field(default=SourceSapHanaEnterpriseEncryptionMethod.UNENCRYPTED, 
metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method'), 'exclude': lambda f: f is None }}) + + + + +class SapHanaEnterprise(str, Enum): + SAP_HANA_ENTERPRISE = 'sap-hana-enterprise' + + +class SourceSapHanaEnterpriseSchemasTunnelMethodTunnelMethod(str, Enum): + SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSapHanaEnterprisePasswordAuthentication: + r"""Connect through a jump server tunnel host using username and password authentication""" + tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }}) + r"""Hostname of the jump server host that allows inbound ssh tunnel.""" + tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }}) + r"""OS-level username for logging into the jump server host""" + tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }}) + r"""OS-level password for logging into the jump server host""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[SourceSapHanaEnterpriseSchemasTunnelMethodTunnelMethod] = dataclasses.field(default=SourceSapHanaEnterpriseSchemasTunnelMethodTunnelMethod.SSH_PASSWORD_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) + tunnel_port: Optional[int] = dataclasses.field(default=22, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port'), 'exclude': lambda f: f is None }}) + r"""Port on the proxy/jump server that accepts inbound ssh connections.""" + + + + +class SourceSapHanaEnterpriseSchemasTunnelMethod(str, Enum): + SSH_KEY_AUTH = 'SSH_KEY_AUTH' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSapHanaEnterpriseSSHKeyAuthentication: + r"""Connect through a jump server tunnel host using username and ssh key""" + ssh_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssh_key') }}) + r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )""" + tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }}) + r"""Hostname of the jump server host that allows inbound ssh tunnel.""" + tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }}) + r"""OS-level username for logging into the jump server host""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[SourceSapHanaEnterpriseSchemasTunnelMethod] = dataclasses.field(default=SourceSapHanaEnterpriseSchemasTunnelMethod.SSH_KEY_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) + tunnel_port: Optional[int] = dataclasses.field(default=22, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port'), 'exclude': lambda f: f is None }}) + r"""Port on the proxy/jump server that accepts inbound ssh connections.""" + + + + +class SourceSapHanaEnterpriseTunnelMethod(str, 
Enum): + NO_TUNNEL = 'NO_TUNNEL' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSapHanaEnterpriseNoTunnel: + r"""No ssh tunnel needed to connect to database""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[SourceSapHanaEnterpriseTunnelMethod] = dataclasses.field(default=SourceSapHanaEnterpriseTunnelMethod.NO_TUNNEL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) + + + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSapHanaEnterprise: + cursor: SourceSapHanaEnterpriseUpdateMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cursor') }}) + r"""Configures how data is extracted from the database.""" + encryption: SourceSapHanaEnterpriseEncryption = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption') }}) + r"""The encryption method which is used when communicating with the database.""" + host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }}) + r"""Hostname of the database.""" + tunnel_method: SourceSapHanaEnterpriseSSHTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }}) + r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + r"""The username which is used to access the database.""" + check_privileges: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('check_privileges'), 'exclude': lambda f: f is None }}) + r"""When this feature is enabled, during schema discovery the connector will query each table or view individually to check access privileges; inaccessible tables, views, or columns will be removed. In large schemas, this might cause schema discovery to take too long, in which case it might be advisable to disable this feature.""" + checkpoint_target_interval_seconds: Optional[int] = dataclasses.field(default=300, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('checkpoint_target_interval_seconds'), 'exclude': lambda f: f is None }}) + r"""How often (in seconds) a stream should checkpoint, when possible.""" + concurrency: Optional[int] = dataclasses.field(default=1, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('concurrency'), 'exclude': lambda f: f is None }}) + r"""Maximum number of concurrent queries to the database.""" + jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }}) + r"""Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. 
diff --git a/src/airbyte_api/models/source_serpstat.py b/src/airbyte_api/models/source_serpstat.py new file mode 100644 index 00000000..8e0f8691 --- /dev/null +++ b/src/airbyte_api/models/source_serpstat.py @@ -0,0 +1,40 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Any, Final, List, Optional + + +class Serpstat(str, Enum): + SERPSTAT = 'serpstat' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSerpstat: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Serpstat API key can be found here: https://serpstat.com/users/profile/""" + domain: Optional[str] = dataclasses.field(default='serpstat.com', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain'), 'exclude': lambda f: f is None }}) + r"""The domain name to get data for (ex. serpstat.com)""" + domains: Optional[List[Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domains'), 'exclude': lambda f: f is None }}) + r"""The list of domains that will be used in streams that support batch operations""" + filter_by: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filter_by'), 'exclude': lambda f: f is None }}) + r"""The field name by which the results should be filtered. Filtering the results will result in fewer API credits spent. Each stream has different filtering options.
See https://serpstat.com/api/ for more details.""" + filter_value: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filter_value'), 'exclude': lambda f: f is None }}) + r"""The value of the field to filter by. Each stream has different filtering options. See https://serpstat.com/api/ for more details.""" + page_size: Optional[int] = dataclasses.field(default=10, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('page_size'), 'exclude': lambda f: f is None }}) + r"""The number of data rows per page to be returned. Each data row can contain multiple data points. The max value is 1000. Reducing the size of the page will result in fewer API credits spent.""" + pages_to_fetch: Optional[int] = dataclasses.field(default=1, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('pages_to_fetch'), 'exclude': lambda f: f is None }}) + r"""The number of pages that should be fetched. All results will be obtained if left blank. Reducing the number of pages will result in fewer API credits spent.""" + region_id: Optional[str] = dataclasses.field(default='g_us', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region_id'), 'exclude': lambda f: f is None }}) + r"""The ID of a region to get data from in the form of a two-letter country code prepended with the g_ prefix. See the list of supported region IDs here: https://serpstat.com/api/664-request-parameters-v4/.""" + sort_by: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sort_by'), 'exclude': lambda f: f is None }}) + r"""The field name by which the results should be sorted. Each stream has different sorting options. See https://serpstat.com/api/ for more details.""" + sort_value: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sort_value'), 'exclude': lambda f: f is None }}) + r"""The value of the field to sort by. Each stream has different sorting options. See https://serpstat.com/api/ for more details.""" + SOURCE_TYPE: Final[Serpstat] = dataclasses.field(default=Serpstat.SERPSTAT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_service_now.py b/src/airbyte_api/models/source_service_now.py new file mode 100644 index 00000000..ed6406d4 --- /dev/null +++ b/src/airbyte_api/models/source_service_now.py @@ -0,0 +1,23 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional + + +class ServiceNow(str, Enum): + SERVICE_NOW = 'service-now' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceServiceNow: + base_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('base_url') }}) + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) + SOURCE_TYPE: Final[ServiceNow] = dataclasses.field(default=ServiceNow.SERVICE_NOW, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_sftp_bulk.py b/src/airbyte_api/models/source_sftp_bulk.py index 96f7d4d6..04827937 100644 --- a/src/airbyte_api/models/source_sftp_bulk.py +++ b/src/airbyte_api/models/source_sftp_bulk.py @@ -47,6 +47,8 @@ class SourceSftpBulkSchemasDeliveryType(str, Enum): class SourceSftpBulkCopyRawFiles: r"""Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.""" DELIVERY_TYPE: Final[Optional[SourceSftpBulkSchemasDeliveryType]] = dataclasses.field(default=SourceSftpBulkSchemasDeliveryType.USE_FILE_TRANSFER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delivery_type'), 'exclude': lambda f: f is None }}) + preserve_directory_structure: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('preserve_directory_structure'), 'exclude': lambda f: f is None }}) + r"""If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled.""" diff --git a/src/airbyte_api/models/source_sharepoint_enterprise.py b/src/airbyte_api/models/source_sharepoint_enterprise.py new file mode 100644 index 00000000..73d3e443 --- /dev/null +++ b/src/airbyte_api/models/source_sharepoint_enterprise.py @@ -0,0 +1,342 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, List, Optional, Union + + +class SourceSharepointEnterpriseSchemasAuthType(str, Enum): + SERVICE = 'Service' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSharepointEnterpriseServiceKeyAuthentication: + r"""ServiceCredentials class for service key authentication. + This class is structured similarly to OAuthCredentials but for a different authentication method. 
+ """ + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + r"""Client ID of your Microsoft developer application""" + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + r"""Client Secret of your Microsoft developer application""" + tenant_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tenant_id') }}) + r"""Tenant ID of the Microsoft SharePoint user""" + user_principal_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('user_principal_name') }}) + r"""Special characters such as a period, comma, space, and the at sign (@) are converted to underscores (_). More details: https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls""" + AUTH_TYPE: Final[Optional[SourceSharepointEnterpriseSchemasAuthType]] = dataclasses.field(default=SourceSharepointEnterpriseSchemasAuthType.SERVICE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) + + + + +class SourceSharepointEnterpriseAuthType(str, Enum): + CLIENT = 'Client' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSharepointEnterpriseAuthenticateViaMicrosoftOAuth: + r"""OAuthCredentials class to hold authentication details for Microsoft OAuth authentication. + This class uses pydantic for data validation and settings management. + """ + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + r"""Client ID of your Microsoft developer application""" + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + r"""Client Secret of your Microsoft developer application""" + tenant_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tenant_id') }}) + r"""Tenant ID of the Microsoft SharePoint user""" + AUTH_TYPE: Final[Optional[SourceSharepointEnterpriseAuthType]] = dataclasses.field(default=SourceSharepointEnterpriseAuthType.CLIENT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) + refresh_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token'), 'exclude': lambda f: f is None }}) + r"""Refresh Token of your Microsoft developer application""" + + + + +class SourceSharepointEnterpriseSchemasDeliveryMethodDeliveryType(str, Enum): + USE_PERMISSIONS_TRANSFER = 'use_permissions_transfer' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSharepointEnterpriseReplicatePermissionsACL: + r"""Sends one identity stream and one for more permissions (ACL) streams to the destination. 
This data can be used in downstream systems to recreate permission restrictions mirroring the original source.""" + DELIVERY_TYPE: Final[Optional[SourceSharepointEnterpriseSchemasDeliveryMethodDeliveryType]] = dataclasses.field(default=SourceSharepointEnterpriseSchemasDeliveryMethodDeliveryType.USE_PERMISSIONS_TRANSFER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delivery_type'), 'exclude': lambda f: f is None }}) + include_identities_stream: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('include_identities_stream'), 'exclude': lambda f: f is None }}) + r"""This data can be used in downstream systems to recreate permission restrictions mirroring the original source""" + + + + +class SourceSharepointEnterpriseSchemasDeliveryType(str, Enum): + USE_FILE_TRANSFER = 'use_file_transfer' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSharepointEnterpriseCopyRawFiles: + r"""Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.""" + DELIVERY_TYPE: Final[Optional[SourceSharepointEnterpriseSchemasDeliveryType]] = dataclasses.field(default=SourceSharepointEnterpriseSchemasDeliveryType.USE_FILE_TRANSFER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delivery_type'), 'exclude': lambda f: f is None }}) + preserve_directory_structure: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('preserve_directory_structure'), 'exclude': lambda f: f is None }}) + r"""If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled.""" + + + + +class SourceSharepointEnterpriseDeliveryType(str, Enum): + USE_RECORDS_TRANSFER = 'use_records_transfer' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSharepointEnterpriseReplicateRecords: + r"""Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination.""" + DELIVERY_TYPE: Final[Optional[SourceSharepointEnterpriseDeliveryType]] = dataclasses.field(default=SourceSharepointEnterpriseDeliveryType.USE_RECORDS_TRANSFER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delivery_type'), 'exclude': lambda f: f is None }}) + + + + +class SourceSharepointEnterpriseSearchScope(str, Enum): + r"""Specifies the location(s) to search for files. 
Valid options are 'ACCESSIBLE_DRIVES' for all SharePoint drives the user can access, 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to search both.""" + ACCESSIBLE_DRIVES = 'ACCESSIBLE_DRIVES' + SHARED_ITEMS = 'SHARED_ITEMS' + ALL = 'ALL' + + +class SourceSharepointEnterpriseSharepointEnterprise(str, Enum): + SHAREPOINT_ENTERPRISE = 'sharepoint-enterprise' + + +class SourceSharepointEnterpriseSchemasStreamsFormatFormat6Filetype(str, Enum): + EXCEL = 'excel' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSharepointEnterpriseExcelFormat: + FILETYPE: Final[Optional[SourceSharepointEnterpriseSchemasStreamsFormatFormat6Filetype]] = dataclasses.field(default=SourceSharepointEnterpriseSchemasStreamsFormatFormat6Filetype.EXCEL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) + + + + +class SourceSharepointEnterpriseSchemasStreamsFormatFormatFiletype(str, Enum): + UNSTRUCTURED = 'unstructured' + + +class SourceSharepointEnterpriseMode(str, Enum): + LOCAL = 'local' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSharepointEnterpriseLocal: + r"""Process files locally, supporting `fast` and `ocr` modes. This is the default option.""" + MODE: Final[Optional[SourceSharepointEnterpriseMode]] = dataclasses.field(default=SourceSharepointEnterpriseMode.LOCAL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }}) + + + + +class SourceSharepointEnterpriseParsingStrategy(str, Enum): + r"""The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf""" + AUTO = 'auto' + FAST = 'fast' + OCR_ONLY = 'ocr_only' + HI_RES = 'hi_res' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSharepointEnterpriseUnstructuredDocumentFormat: + r"""Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.""" + FILETYPE: Final[Optional[SourceSharepointEnterpriseSchemasStreamsFormatFormatFiletype]] = dataclasses.field(default=SourceSharepointEnterpriseSchemasStreamsFormatFormatFiletype.UNSTRUCTURED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) + processing: Optional[SourceSharepointEnterpriseProcessing] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('processing'), 'exclude': lambda f: f is None }}) + r"""Processing configuration""" + skip_unprocessable_files: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('skip_unprocessable_files'), 'exclude': lambda f: f is None }}) + r"""If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. 
If false, fail the sync.""" + strategy: Optional[SourceSharepointEnterpriseParsingStrategy] = dataclasses.field(default=SourceSharepointEnterpriseParsingStrategy.AUTO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('strategy'), 'exclude': lambda f: f is None }}) + r"""The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf""" + + + + +class SourceSharepointEnterpriseSchemasStreamsFormatFiletype(str, Enum): + PARQUET = 'parquet' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSharepointEnterpriseParquetFormat: + decimal_as_float: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('decimal_as_float'), 'exclude': lambda f: f is None }}) + r"""Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.""" + FILETYPE: Final[Optional[SourceSharepointEnterpriseSchemasStreamsFormatFiletype]] = dataclasses.field(default=SourceSharepointEnterpriseSchemasStreamsFormatFiletype.PARQUET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) + + + + +class SourceSharepointEnterpriseSchemasStreamsFiletype(str, Enum): + JSONL = 'jsonl' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSharepointEnterpriseJsonlFormat: + FILETYPE: Final[Optional[SourceSharepointEnterpriseSchemasStreamsFiletype]] = dataclasses.field(default=SourceSharepointEnterpriseSchemasStreamsFiletype.JSONL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) + + + + +class SourceSharepointEnterpriseSchemasFiletype(str, Enum): + CSV = 'csv' + + +class SourceSharepointEnterpriseSchemasStreamsHeaderDefinitionType(str, Enum): + USER_PROVIDED = 'User Provided' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSharepointEnterpriseUserProvided: + column_names: List[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('column_names') }}) + r"""The column names that will be used while emitting the CSV records""" + HEADER_DEFINITION_TYPE: Final[Optional[SourceSharepointEnterpriseSchemasStreamsHeaderDefinitionType]] = dataclasses.field(default=SourceSharepointEnterpriseSchemasStreamsHeaderDefinitionType.USER_PROVIDED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('header_definition_type'), 'exclude': lambda f: f is None }}) + + + + +class SourceSharepointEnterpriseSchemasHeaderDefinitionType(str, Enum): + AUTOGENERATED = 'Autogenerated' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSharepointEnterpriseAutogenerated: + HEADER_DEFINITION_TYPE: Final[Optional[SourceSharepointEnterpriseSchemasHeaderDefinitionType]] = dataclasses.field(default=SourceSharepointEnterpriseSchemasHeaderDefinitionType.AUTOGENERATED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('header_definition_type'), 'exclude': lambda f: f is None }}) + + + + +class 
SourceSharepointEnterpriseHeaderDefinitionType(str, Enum): + FROM_CSV = 'From CSV' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSharepointEnterpriseFromCSV: + HEADER_DEFINITION_TYPE: Final[Optional[SourceSharepointEnterpriseHeaderDefinitionType]] = dataclasses.field(default=SourceSharepointEnterpriseHeaderDefinitionType.FROM_CSV, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('header_definition_type'), 'exclude': lambda f: f is None }}) + + + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSharepointEnterpriseCSVFormat: + delimiter: Optional[str] = dataclasses.field(default=',', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delimiter'), 'exclude': lambda f: f is None }}) + r"""The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.""" + double_quote: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('double_quote'), 'exclude': lambda f: f is None }}) + r"""Whether two quotes in a quoted CSV value denote a single quote in the data.""" + encoding: Optional[str] = dataclasses.field(default='utf8', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encoding'), 'exclude': lambda f: f is None }}) + r"""The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.""" + escape_char: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('escape_char'), 'exclude': lambda f: f is None }}) + r"""The character used for escaping special characters. To disallow escaping, leave this field blank.""" + false_values: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('false_values'), 'exclude': lambda f: f is None }}) + r"""A set of case-sensitive strings that should be interpreted as false values.""" + FILETYPE: Final[Optional[SourceSharepointEnterpriseSchemasFiletype]] = dataclasses.field(default=SourceSharepointEnterpriseSchemasFiletype.CSV, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) + header_definition: Optional[SourceSharepointEnterpriseCSVHeaderDefinition] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('header_definition'), 'exclude': lambda f: f is None }}) + r"""How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided, and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using `f{i}` where `i` is the index starting from 0. Otherwise, the default behavior is to use the header from the CSV file.
If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.""" + ignore_errors_on_fields_mismatch: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ignore_errors_on_fields_mismatch'), 'exclude': lambda f: f is None }}) + r"""Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema.""" + null_values: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('null_values'), 'exclude': lambda f: f is None }}) + r"""A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.""" + quote_char: Optional[str] = dataclasses.field(default='"', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('quote_char'), 'exclude': lambda f: f is None }}) + r"""The character used for quoting CSV values. To disallow quoting, make this field blank.""" + skip_rows_after_header: Optional[int] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('skip_rows_after_header'), 'exclude': lambda f: f is None }}) + r"""The number of rows to skip after the header row.""" + skip_rows_before_header: Optional[int] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('skip_rows_before_header'), 'exclude': lambda f: f is None }}) + r"""The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.""" + strings_can_be_null: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('strings_can_be_null'), 'exclude': lambda f: f is None }}) + r"""Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.""" + true_values: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('true_values'), 'exclude': lambda f: f is None }}) + r"""A set of case-sensitive strings that should be interpreted as true values.""" + + + + +class SourceSharepointEnterpriseFiletype(str, Enum): + AVRO = 'avro' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSharepointEnterpriseAvroFormat: + double_as_string: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('double_as_string'), 'exclude': lambda f: f is None }}) + r"""Whether to convert double fields to strings. 
This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers.""" + FILETYPE: Final[Optional[SourceSharepointEnterpriseFiletype]] = dataclasses.field(default=SourceSharepointEnterpriseFiletype.AVRO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) + + + + +class SourceSharepointEnterpriseValidationPolicy(str, Enum): + r"""The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.""" + EMIT_RECORD = 'Emit Record' + SKIP_RECORD = 'Skip Record' + WAIT_FOR_DISCOVER = 'Wait for Discover' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSharepointEnterpriseFileBasedStreamConfig: + format: SourceSharepointEnterpriseFormat = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format') }}) + r"""The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.""" + name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }}) + r"""The name of the stream.""" + days_to_sync_if_history_is_full: Optional[int] = dataclasses.field(default=3, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('days_to_sync_if_history_is_full'), 'exclude': lambda f: f is None }}) + r"""When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.""" + globs: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('globs'), 'exclude': lambda f: f is None }}) + r"""The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.""" + input_schema: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('input_schema'), 'exclude': lambda f: f is None }}) + r"""The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.""" + recent_n_files_to_read_for_schema_discovery: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('recent_n_files_to_read_for_schema_discovery'), 'exclude': lambda f: f is None }}) + r"""The number of recent files which will be used to discover the schema for this stream.""" + schemaless: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schemaless'), 'exclude': lambda f: f is None }}) + r"""When enabled, syncs will not validate or structure records against the stream's schema.""" + validation_policy: Optional[SourceSharepointEnterpriseValidationPolicy] = dataclasses.field(default=SourceSharepointEnterpriseValidationPolicy.EMIT_RECORD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('validation_policy'), 'exclude': lambda f: f is None }}) + r"""The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema."""
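The stream configuration above composes several unions (file format, CSV header definition, validation policy). A short sketch of a single CSV stream, assuming the classes are re-exported from airbyte_api.models; the stream name, delimiter, and column names are illustrative:

    from airbyte_api import models

    # A hypothetical semicolon-delimited CSV stream whose files carry no header row,
    # so column names are supplied by the caller.
    invoices_stream = models.SourceSharepointEnterpriseFileBasedStreamConfig(
        name='invoices',
        format=models.SourceSharepointEnterpriseCSVFormat(
            delimiter=';',
            header_definition=models.SourceSharepointEnterpriseUserProvided(
                column_names=['id', 'amount', 'issued_at'],
            ),
        ),
        globs=['**/*.csv'],
        validation_policy=models.SourceSharepointEnterpriseValidationPolicy.SKIP_RECORD,
    )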
+ + + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSharepointEnterprise: + r"""SourceMicrosoftSharePointSpec class for Microsoft SharePoint Source Specification. + This class combines the authentication details with additional configuration for the SharePoint API. + """ + credentials: SourceSharepointEnterpriseAuthentication = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }}) + r"""Credentials for connecting to the One Drive API""" + streams: List[SourceSharepointEnterpriseFileBasedStreamConfig] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('streams') }}) + r"""Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.""" + delivery_method: Optional[SourceSharepointEnterpriseDeliveryMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delivery_method'), 'exclude': lambda f: f is None }}) + folder_path: Optional[str] = dataclasses.field(default='.', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('folder_path'), 'exclude': lambda f: f is None }}) + r"""Path to a specific folder within the drives to search for files. Leave empty to search all folders of the drives. This does not apply to shared items.""" + search_scope: Optional[SourceSharepointEnterpriseSearchScope] = dataclasses.field(default=SourceSharepointEnterpriseSearchScope.ALL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('search_scope'), 'exclude': lambda f: f is None }}) + r"""Specifies the location(s) to search for files. Valid options are 'ACCESSIBLE_DRIVES' for all SharePoint drives the user can access, 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to search both.""" + site_url: Optional[str] = dataclasses.field(default='', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('site_url'), 'exclude': lambda f: f is None }}) + r"""URL of the SharePoint site to search for files. Leave empty to search in the main site. Use 'https://.sharepoint.com/sites/' to iterate over all sites.""" + SOURCE_TYPE: Final[SourceSharepointEnterpriseSharepointEnterprise] = dataclasses.field(default=SourceSharepointEnterpriseSharepointEnterprise.SHAREPOINT_ENTERPRISE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) + r"""UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.""" + + + +SourceSharepointEnterpriseAuthentication = Union[SourceSharepointEnterpriseAuthenticateViaMicrosoftOAuth, SourceSharepointEnterpriseServiceKeyAuthentication] + +SourceSharepointEnterpriseDeliveryMethod = Union[SourceSharepointEnterpriseReplicateRecords, SourceSharepointEnterpriseCopyRawFiles, SourceSharepointEnterpriseReplicatePermissionsACL] + +SourceSharepointEnterpriseProcessing = Union[SourceSharepointEnterpriseLocal] + +SourceSharepointEnterpriseCSVHeaderDefinition = Union[SourceSharepointEnterpriseFromCSV, SourceSharepointEnterpriseAutogenerated, SourceSharepointEnterpriseUserProvided] + +SourceSharepointEnterpriseFormat = Union[SourceSharepointEnterpriseAvroFormat, SourceSharepointEnterpriseCSVFormat, SourceSharepointEnterpriseJsonlFormat, SourceSharepointEnterpriseParquetFormat, SourceSharepointEnterpriseUnstructuredDocumentFormat, SourceSharepointEnterpriseExcelFormat]
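Putting the pieces together, a hedged end-to-end sketch of creating a SharePoint Enterprise source through the SDK. The AirbyteAPI entry point, Security model, and SourceCreateRequest follow this SDK's README conventions rather than anything in this diff; every credential and ID below is a placeholder, and the JSONL stream is kept deliberately minimal (a fuller CSV stream appears in the sketch above):

    import airbyte_api
    from airbyte_api import models

    s = airbyte_api.AirbyteAPI(security=models.Security(bearer_auth='<api-token>'))

    res = s.sources.create_source(request=models.SourceCreateRequest(
        name='sharepoint-enterprise-docs',
        workspace_id='<workspace-uuid>',
        configuration=models.SourceSharepointEnterprise(
            credentials=models.SourceSharepointEnterpriseAuthenticateViaMicrosoftOAuth(
                client_id='<client-id>',
                client_secret='<client-secret>',
                tenant_id='<tenant-id>',
                refresh_token='<refresh-token>',
            ),
            streams=[models.SourceSharepointEnterpriseFileBasedStreamConfig(
                name='documents',
                format=models.SourceSharepointEnterpriseJsonlFormat(),
            )],
            # Structured records are the recommended delivery method per the docstring above.
            delivery_method=models.SourceSharepointEnterpriseReplicateRecords(),
        ),
    ))
    if res.source_response is not None:
        print(res.source_response.source_id)  # hypothetical field access on the response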
diff --git a/src/airbyte_api/models/source_shipstation.py b/src/airbyte_api/models/source_shipstation.py new file mode 100644 index 00000000..131e819b --- /dev/null +++ b/src/airbyte_api/models/source_shipstation.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Shipstation(str, Enum): + SHIPSTATION = 'shipstation' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceShipstation: + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) + SOURCE_TYPE: Final[Shipstation] = dataclasses.field(default=Shipstation.SHIPSTATION, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_shopify.py b/src/airbyte_api/models/source_shopify.py index 0d845201..170b77f8 100644 --- a/src/airbyte_api/models/source_shopify.py +++ b/src/airbyte_api/models/source_shopify.py @@ -60,7 +60,7 @@ class SourceShopify: fetch_transactions_user_id: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('fetch_transactions_user_id'), 'exclude': lambda f: f is None }}) r"""Defines which API type (REST/BULK) to use to fetch `Transactions` data.
If you are a `Shopify Plus` user, leave the default value to speed up the fetch.""" job_checkpoint_interval: Optional[int] = dataclasses.field(default=100000, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('job_checkpoint_interval'), 'exclude': lambda f: f is None }}) - r"""The threshold, after which the single BULK Job should be checkpointed.""" + r"""The threshold, after which the single BULK Job should be checkpointed (min: 15k, max: 1M)""" job_product_variants_include_pres_prices: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('job_product_variants_include_pres_prices'), 'exclude': lambda f: f is None }}) r"""If enabled, the `Product Variants` stream attempts to include `Presentment prices` field (may affect the performance).""" job_termination_threshold: Optional[int] = dataclasses.field(default=7200, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('job_termination_threshold'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_shopwired.py b/src/airbyte_api/models/source_shopwired.py new file mode 100644 index 00000000..59a70f13 --- /dev/null +++ b/src/airbyte_api/models/source_shopwired.py @@ -0,0 +1,27 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Shopwired(str, Enum): + SHOPWIRED = 'shopwired' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceShopwired: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Your API Key, which acts as the username for Basic Authentication. You can find it in your ShopWired account under API settings.""" + api_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_secret') }}) + r"""Your API Secret, which acts as the password for Basic Authentication. You can find it in your ShopWired account under API settings.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Shopwired] = dataclasses.field(default=Shopwired.SHOPWIRED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_shutterstock.py b/src/airbyte_api/models/source_shutterstock.py new file mode 100644 index 00000000..a35d82d0 --- /dev/null +++ b/src/airbyte_api/models/source_shutterstock.py @@ -0,0 +1,33 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Shutterstock(str, Enum): + SHUTTERSTOCK = 'shutterstock' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceShutterstock: + api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }}) + r"""Your OAuth 2.0 token for accessing the Shutterstock API. Obtain this token from your Shutterstock developer account.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + query_for_audio_search: Optional[str] = dataclasses.field(default='mountain', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('query_for_audio_search'), 'exclude': lambda f: f is None }}) + r"""The query for audio search""" + query_for_catalog_search: Optional[str] = dataclasses.field(default='mountain', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('query_for_catalog_search'), 'exclude': lambda f: f is None }}) + r"""The query for catalog search""" + query_for_image_search: Optional[str] = dataclasses.field(default='mountain', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('query_for_image_search'), 'exclude': lambda f: f is None }}) + r"""The query for image search""" + query_for_video_search: Optional[str] = dataclasses.field(default='mountain', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('query_for_video_search'), 'exclude': lambda f: f is None }}) + r"""The query for the `videos_search` stream""" + SOURCE_TYPE: Final[Shutterstock] = dataclasses.field(default=Shutterstock.SHUTTERSTOCK, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_signnow.py b/src/airbyte_api/models/source_signnow.py new file mode 100644 index 00000000..23c94b02 --- /dev/null +++ b/src/airbyte_api/models/source_signnow.py @@ -0,0 +1,29 @@ +"""Code generated by Speakeasy (https://speakeasy.com).
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Any, Final, List, Optional + + +class Signnow(str, Enum): + SIGNNOW = 'signnow' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSignnow: + api_key_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key_id') }}) + r"""API key, which can be found in the API section after expanding the Keys section""" + auth_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_token') }}) + r"""The authorization token needed for the `signing_links` stream; it can be found in the expanded view of `https://app.signnow.com/webapp/api-dashboard/keys`""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + name_filter_for_documents: Optional[List[Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name_filter_for_documents'), 'exclude': lambda f: f is None }}) + r"""Name filter for documents stream""" + SOURCE_TYPE: Final[Signnow] = dataclasses.field(default=Signnow.SIGNNOW, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_slack.py b/src/airbyte_api/models/source_slack.py index 5b6b7a72..b95d2ec3 100644 --- a/src/airbyte_api/models/source_slack.py +++ b/src/airbyte_api/models/source_slack.py @@ -53,6 +53,8 @@ class SourceSlack: r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.""" channel_filter: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('channel_filter'), 'exclude': lambda f: f is None }}) r"""A channel name list (without leading '#' char) which limit the channels from which you'd like to sync. Empty list means no filter.""" + channel_messages_window_size: Optional[int] = dataclasses.field(default=100, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('channel_messages_window_size'), 'exclude': lambda f: f is None }}) + r"""The size (in days) of the date window that will be used while syncing data from the channel messages stream. A smaller window will allow for greater parallelization when syncing records, but can lead to rate limiting errors.""" credentials: Optional[SourceSlackAuthenticationMechanism] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) r"""Choose how to authenticate into Slack""" include_private_channels: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('include_private_channels'), 'exclude': lambda f: f is None }}) @@ -61,6 +63,8 @@ class SourceSlack: r"""Whether to join all channels or to sync data only from channels the bot is already in.
If false, you'll need to manually add the bot to all the channels from which you'd like to sync messages.""" lookback_window: Optional[int] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_window'), 'exclude': lambda f: f is None }}) r"""How far into the past to look for messages in threads, default is 0 days""" + num_workers: Optional[int] = dataclasses.field(default=2, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('num_workers'), 'exclude': lambda f: f is None }}) + r"""The number of worker threads to use for the sync.""" SOURCE_TYPE: Final[SourceSlackSlack] = dataclasses.field(default=SourceSlackSlack.SLACK, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_smartsheets.py b/src/airbyte_api/models/source_smartsheets.py index a4244c9d..7b95d75a 100644 --- a/src/airbyte_api/models/source_smartsheets.py +++ b/src/airbyte_api/models/source_smartsheets.py @@ -76,6 +76,8 @@ class SourceSmartsheets: credentials: SourceSmartsheetsAuthorizationMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }}) spreadsheet_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('spreadsheet_id') }}) r"""The spreadsheet ID. Find it by opening the spreadsheet then navigating to File > Properties""" + is_report: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('is_report'), 'exclude': lambda f: f is None }}) + r"""If true, the source will treat the provided sheet_id as a report. If false, the source will treat the provided sheet_id as a sheet.""" metadata_fields: Optional[List[Validenums]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('metadata_fields'), 'exclude': lambda f: f is None }}) r"""A List of available columns which metadata can be pulled from.""" SOURCE_TYPE: Final[SourceSmartsheetsSmartsheets] = dataclasses.field(default=SourceSmartsheetsSmartsheets.SMARTSHEETS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_snapchat_marketing.py b/src/airbyte_api/models/source_snapchat_marketing.py index b5ba7e53..a55fbb21 100644 --- a/src/airbyte_api/models/source_snapchat_marketing.py +++ b/src/airbyte_api/models/source_snapchat_marketing.py @@ -7,7 +7,7 @@ from dataclasses_json import Undefined, dataclass_json from datetime import date from enum import Enum -from typing import Final, Optional +from typing import Any, Final, List, Optional class ActionReportTime(str, Enum): @@ -47,8 +47,12 @@ class SourceSnapchatMarketing: r"""Refresh Token to renew the expired Access Token.""" action_report_time: Optional[ActionReportTime] = dataclasses.field(default=ActionReportTime.CONVERSION, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('action_report_time'), 'exclude': lambda f: f is None }}) r"""Specifies the principle for conversion reporting.""" + ad_account_ids: Optional[List[Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ad_account_ids'), 'exclude': lambda f: f is None }}) + r"""Ad Account IDs of the ad accounts to retrieve""" end_date: Optional[date] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': 
utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'exclude': lambda f: f is None }}) r"""Date in the format 2017-01-25. Any data after this date will not be replicated.""" + organization_ids: Optional[List[Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('organization_ids'), 'exclude': lambda f: f is None }}) + r"""The IDs of the organizations to retrieve""" SOURCE_TYPE: Final[SourceSnapchatMarketingSnapchatMarketing] = dataclasses.field(default=SourceSnapchatMarketingSnapchatMarketing.SNAPCHAT_MARKETING, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) start_date: Optional[date] = dataclasses.field(default=dateutil.parser.parse('2022-01-01').date(), metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'exclude': lambda f: f is None }}) r"""Date in the format 2022-01-01. Any data before this date will not be replicated.""" diff --git a/src/airbyte_api/models/source_snowflake.py b/src/airbyte_api/models/source_snowflake.py index 3716b78f..ccc85180 100644 --- a/src/airbyte_api/models/source_snowflake.py +++ b/src/airbyte_api/models/source_snowflake.py @@ -5,10 +5,10 @@ from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json from enum import Enum -from typing import Final, Optional, Union +from typing import Any, Dict, Final, Optional, Union -class SourceSnowflakeSchemasCredentialsAuthType(str, Enum): +class SourceSnowflakeSchemasAuthType(str, Enum): USERNAME_PASSWORD = 'username/password' @@ -19,12 +19,13 @@ class SourceSnowflakeUsernameAndPassword: r"""The password associated with the username.""" username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) r"""The username you created to allow Airbyte to access the database.""" - AUTH_TYPE: Final[SourceSnowflakeSchemasCredentialsAuthType] = dataclasses.field(default=SourceSnowflakeSchemasCredentialsAuthType.USERNAME_PASSWORD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }}) + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + auth_type: Optional[SourceSnowflakeSchemasAuthType] = dataclasses.field(default=SourceSnowflakeSchemasAuthType.USERNAME_PASSWORD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) -class SourceSnowflakeSchemasAuthType(str, Enum): +class SourceSnowflakeAuthType(str, Enum): KEY_PAIR_AUTHENTICATION = 'Key Pair Authentication' @@ -35,29 +36,24 @@ class SourceSnowflakeKeyPairAuthentication: r"""RSA Private key to use for Snowflake connection. 
See the docs for more information on how to obtain this key.""" username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) r"""The username you created to allow Airbyte to access the database.""" - AUTH_TYPE: Final[Optional[SourceSnowflakeSchemasAuthType]] = dataclasses.field(default=SourceSnowflakeSchemasAuthType.KEY_PAIR_AUTHENTICATION, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + auth_type: Optional[SourceSnowflakeAuthType] = dataclasses.field(default=SourceSnowflakeAuthType.KEY_PAIR_AUTHENTICATION, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) private_key_password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('private_key_password'), 'exclude': lambda f: f is None }}) r"""Passphrase for private key""" -class SourceSnowflakeAuthType(str, Enum): - O_AUTH = 'OAuth' +class SourceSnowflakeCursorMethod(str, Enum): + USER_DEFINED = 'user_defined' @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class SourceSnowflakeOAuth20: - client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) - r"""The Client ID of your Snowflake developer application.""" - client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) - r"""The Client Secret of your Snowflake developer application.""" - access_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token'), 'exclude': lambda f: f is None }}) - r"""Access Token for making authenticated requests.""" - AUTH_TYPE: Final[SourceSnowflakeAuthType] = dataclasses.field(default=SourceSnowflakeAuthType.O_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }}) - refresh_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token'), 'exclude': lambda f: f is None }}) - r"""Refresh Token for making authenticated requests.""" +class SourceSnowflakeScanChangesWithUserDefinedCursor: + r"""Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. 
created_at, updated_at).""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + cursor_method: Optional[SourceSnowflakeCursorMethod] = dataclasses.field(default=SourceSnowflakeCursorMethod.USER_DEFINED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cursor_method'), 'exclude': lambda f: f is None }}) @@ -77,7 +73,15 @@ class SourceSnowflake: r"""The role you created for Airbyte to access Snowflake.""" warehouse: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('warehouse') }}) r"""The warehouse you created for Airbyte to access data.""" + check_privileges: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('check_privileges'), 'exclude': lambda f: f is None }}) + r"""When this feature is enabled, during schema discovery the connector will query each table or view individually to check access privileges and inaccessible tables, views, or columns therein will be removed. In large schemas, this might cause schema discovery to take too long, in which case it might be advisable to disable this feature.""" + checkpoint_target_interval_seconds: Optional[int] = dataclasses.field(default=300, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('checkpoint_target_interval_seconds'), 'exclude': lambda f: f is None }}) + r"""How often (in seconds) a stream should checkpoint, when possible.""" + concurrency: Optional[int] = dataclasses.field(default=1, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('concurrency'), 'exclude': lambda f: f is None }}) + r"""Maximum number of concurrent queries to the database.""" credentials: Optional[SourceSnowflakeAuthorizationMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) + cursor: Optional[SourceSnowflakeUpdateMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cursor'), 'exclude': lambda f: f is None }}) + r"""Configures how data is extracted from the database.""" jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }}) r"""Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).""" schema: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schema'), 'exclude': lambda f: f is None }}) @@ -86,4 +90,6 @@ class SourceSnowflake: - -SourceSnowflakeAuthorizationMethod = Union[SourceSnowflakeOAuth20, SourceSnowflakeKeyPairAuthentication, SourceSnowflakeUsernameAndPassword] +SourceSnowflakeAuthorizationMethod = Union[SourceSnowflakeKeyPairAuthentication, SourceSnowflakeUsernameAndPassword] + +SourceSnowflakeUpdateMethod = Union[SourceSnowflakeScanChangesWithUserDefinedCursor]
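Two things change for Snowflake above: the OAuth variant disappears from SourceSnowflakeAuthorizationMethod, and an optional cursor union is added. A brief construction sketch follows; host and database are assumed from the unchanged part of the model (they are outside the hunks shown here), and every value is a placeholder:

    from airbyte_api import models

    snowflake_source = models.SourceSnowflake(
        host='<account>.snowflakecomputing.com',
        role='AIRBYTE_ROLE',
        warehouse='AIRBYTE_WAREHOUSE',
        database='AIRBYTE_DATABASE',
        # OAuth is no longer part of the authorization union; use key-pair
        # or username/password authentication instead.
        credentials=models.SourceSnowflakeKeyPairAuthentication(
            private_key='<rsa-private-key-pem>',
            username='AIRBYTE_USER',
        ),
        # Currently the only cursor variant is the user-defined scan.
        cursor=models.SourceSnowflakeScanChangesWithUserDefinedCursor(),
    )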
diff --git a/src/airbyte_api/models/source_spotify_ads.py b/src/airbyte_api/models/source_spotify_ads.py
new file mode 100644
index 00000000..30b7f6e2
--- /dev/null
+++ b/src/airbyte_api/models/source_spotify_ads.py
@@ -0,0 +1,67 @@
+"""Code generated by Speakeasy (https://speakeasy.com).
+DO NOT EDIT."""
+
+from __future__ import annotations
+import dataclasses
+from airbyte_api import utils
+from dataclasses_json import Undefined, dataclass_json
+from enum import Enum
+from typing import Final, List
+
+
+class Fields(str, Enum):
+    CLICKS = 'CLICKS'
+    COMPLETES = 'COMPLETES'
+    COMPLETION_RATE = 'COMPLETION_RATE'
+    CONVERSION_RATE = 'CONVERSION_RATE'
+    CTR = 'CTR'
+    E_CPM = 'E_CPM'
+    E_CPCL = 'E_CPCL'
+    FIRST_QUARTILES = 'FIRST_QUARTILES'
+    FREQUENCY = 'FREQUENCY'
+    IMPRESSIONS = 'IMPRESSIONS'
+    INTENT_RATE = 'INTENT_RATE'
+    LISTENERS = 'LISTENERS'
+    MIDPOINTS = 'MIDPOINTS'
+    NEW_LISTENERS = 'NEW_LISTENERS'
+    NEW_LISTENER_CONVERSION_RATE = 'NEW_LISTENER_CONVERSION_RATE'
+    NEW_LISTENER_STREAMS = 'NEW_LISTENER_STREAMS'
+    OFF_SPOTIFY_IMPRESSIONS = 'OFF_SPOTIFY_IMPRESSIONS'
+    PAID_LISTENS = 'PAID_LISTENS'
+    PAID_LISTENS_FREQUENCY = 'PAID_LISTENS_FREQUENCY'
+    PAID_LISTENS_REACH = 'PAID_LISTENS_REACH'
+    REACH = 'REACH'
+    SKIPS = 'SKIPS'
+    SPEND = 'SPEND'
+    STARTS = 'STARTS'
+    STREAMS = 'STREAMS'
+    STREAMS_PER_NEW_LISTENER = 'STREAMS_PER_NEW_LISTENER'
+    STREAMS_PER_USER = 'STREAMS_PER_USER'
+    THIRD_QUARTILES = 'THIRD_QUARTILES'
+    VIDEO_VIEWS = 'VIDEO_VIEWS'
+    VIDEO_EXPANDS = 'VIDEO_EXPANDS'
+    VIDEO_EXPAND_RATE = 'VIDEO_EXPAND_RATE'
+    UNMUTES = 'UNMUTES'
+
+
+class SpotifyAds(str, Enum):
+    SPOTIFY_ADS = 'spotify-ads'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class SourceSpotifyAds:
+    ad_account_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ad_account_id') }})
+    r"""The ID of the Spotify Ad Account you want to sync data from."""
+    client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }})
+    r"""The Client ID of your Spotify Developer application."""
+    client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }})
+    r"""The Client Secret of your Spotify Developer application."""
+    fields: List[Fields] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('fields') }})
+    r"""List of fields to include in the campaign performance report. Choose from available metrics."""
+    refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }})
+    r"""The Refresh Token obtained from the initial OAuth 2.0 authorization flow."""
+    start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
+    r"""The date to start syncing data from, in YYYY-MM-DD format."""
+    SOURCE_TYPE: Final[SpotifyAds] = dataclasses.field(default=SpotifyAds.SPOTIFY_ADS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
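A construction sketch for the new Spotify Ads config, using the module path introduced above; all credential values are placeholders:

    from airbyte_api.models.source_spotify_ads import Fields, SourceSpotifyAds

    config = SourceSpotifyAds(
        ad_account_id='example-ad-account-id',  # placeholder
        client_id='example-client-id',          # placeholder
        client_secret='example-client-secret',  # placeholder
        fields=[Fields.IMPRESSIONS, Fields.CLICKS, Fields.SPEND],
        refresh_token='example-refresh-token',  # placeholder
        start_date='2024-01-01',                # YYYY-MM-DD, per the docstring
    )

SOURCE_TYPE is Final with a default, so callers never pass it explicitly.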
diff --git a/src/airbyte_api/models/source_svix.py b/src/airbyte_api/models/source_svix.py
new file mode 100644
index 00000000..e8190063
--- /dev/null
+++ b/src/airbyte_api/models/source_svix.py
@@ -0,0 +1,25 @@
+"""Code generated by Speakeasy (https://speakeasy.com).
+DO NOT EDIT."""
+
+from __future__ import annotations
+import dataclasses
+import dateutil.parser
+from airbyte_api import utils
+from dataclasses_json import Undefined, dataclass_json
+from datetime import datetime
+from enum import Enum
+from typing import Final
+
+
+class Svix(str, Enum):
+    SVIX = 'svix'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class SourceSvix:
+    api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
+    r"""API key or access token"""
+    start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }})
+    SOURCE_TYPE: Final[Svix] = dataclasses.field(default=Svix.SVIX, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
diff --git a/src/airbyte_api/models/source_tavus.py b/src/airbyte_api/models/source_tavus.py
new file mode 100644
index 00000000..3e9ade93
--- /dev/null
+++ b/src/airbyte_api/models/source_tavus.py
@@ -0,0 +1,25 @@
+"""Code generated by Speakeasy (https://speakeasy.com).
+DO NOT EDIT."""
+
+from __future__ import annotations
+import dataclasses
+import dateutil.parser
+from airbyte_api import utils
+from dataclasses_json import Undefined, dataclass_json
+from datetime import datetime
+from enum import Enum
+from typing import Final
+
+
+class Tavus(str, Enum):
+    TAVUS = 'tavus'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class SourceTavus:
+    api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
+    r"""Your Tavus API key. You can find this in your Tavus account settings or API dashboard."""
+    start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }})
+    SOURCE_TYPE: Final[Tavus] = dataclasses.field(default=Tavus.TAVUS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
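Svix and Tavus share the same shape (an API key plus a datetime start_date); a quick sketch with placeholder values:

    from datetime import datetime, timezone
    from airbyte_api.models.source_svix import SourceSvix
    from airbyte_api.models.source_tavus import SourceTavus

    since = datetime(2024, 1, 1, tzinfo=timezone.utc)
    svix = SourceSvix(api_key='example-key', start_date=since)
    tavus = SourceTavus(api_key='example-key', start_date=since)

On the wire, start_date is serialized by utils.datetimeisoformat and parsed back with dateutil.parser.isoparse, as the field metadata above shows.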
diff --git a/src/airbyte_api/models/source_thinkific_courses.py b/src/airbyte_api/models/source_thinkific_courses.py
new file mode 100644
index 00000000..baf7aef9
--- /dev/null
+++ b/src/airbyte_api/models/source_thinkific_courses.py
@@ -0,0 +1,22 @@
+"""Code generated by Speakeasy (https://speakeasy.com).
+DO NOT EDIT."""
+
+from __future__ import annotations
+import dataclasses
+from airbyte_api import utils
+from dataclasses_json import Undefined, dataclass_json
+from enum import Enum
+from typing import Final
+
+
+class ThinkificCourses(str, Enum):
+    THINKIFIC_COURSES = 'thinkific-courses'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class SourceThinkificCourses:
+    x_auth_subdomain: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('X-Auth-Subdomain') }})
+    api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
+    SOURCE_TYPE: Final[ThinkificCourses] = dataclasses.field(default=ThinkificCourses.THINKIFIC_COURSES, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
diff --git a/src/airbyte_api/models/source_thrive_learning.py b/src/airbyte_api/models/source_thrive_learning.py
new file mode 100644
index 00000000..dd332e65
--- /dev/null
+++ b/src/airbyte_api/models/source_thrive_learning.py
@@ -0,0 +1,26 @@
+"""Code generated by Speakeasy (https://speakeasy.com).
+DO NOT EDIT."""
+
+from __future__ import annotations
+import dataclasses
+import dateutil.parser
+from airbyte_api import utils
+from dataclasses_json import Undefined, dataclass_json
+from datetime import datetime
+from enum import Enum
+from typing import Final, Optional
+
+
+class ThriveLearning(str, Enum):
+    THRIVE_LEARNING = 'thrive-learning'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class SourceThriveLearning:
+    start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }})
+    username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
+    r"""Your website Tenant ID (e.g. eu-west-000000; contact support for your tenant)"""
+    password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }})
+    SOURCE_TYPE: Final[ThriveLearning] = dataclasses.field(default=ThriveLearning.THRIVE_LEARNING, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
diff --git a/src/airbyte_api/models/source_timely.py b/src/airbyte_api/models/source_timely.py
index e7b34a7d..0d2d6dfd 100644
--- a/src/airbyte_api/models/source_timely.py
+++ b/src/airbyte_api/models/source_timely.py
@@ -2,8 +2,10 @@
 from __future__ import annotations
 import dataclasses
+import dateutil.parser
 from airbyte_api import utils
 from dataclasses_json import Undefined, dataclass_json
+from datetime import datetime
 from enum import Enum
 from typing import Final
@@ -16,11 +18,11 @@ class Timely(str, Enum):
 @dataclasses.dataclass
 class SourceTimely:
     account_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('account_id') }})
-    r"""Timely account id"""
+    r"""The Account ID for your Timely account"""
     bearer_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('bearer_token') }})
-    r"""Timely bearer token"""
-    start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
-    r"""start date"""
+    r"""The Bearer Token for your Timely account"""
+    start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }})
+    r"""Earliest date from which you want to pull data."""
     SOURCE_TYPE: Final[Timely] = dataclasses.field(default=Timely.TIMELY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
diff --git a/src/airbyte_api/models/source_tmdb.py b/src/airbyte_api/models/source_tmdb.py
new file mode 100644
index 00000000..6fca9d21
--- /dev/null
+++ b/src/airbyte_api/models/source_tmdb.py
@@ -0,0 +1,28 @@
+"""Code generated by Speakeasy (https://speakeasy.com).
+DO NOT EDIT."""
+
+from __future__ import annotations
+import dataclasses
+from airbyte_api import utils
+from dataclasses_json import Undefined, dataclass_json
+from enum import Enum
+from typing import Final
+
+
+class Tmdb(str, Enum):
+    TMDB = 'tmdb'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class SourceTmdb:
+    api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }})
+    r"""API Key from your TMDB account"""
+    language: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('language') }})
+    r"""Language expressed in the ISO 639-1 scheme; mandatory for certain streams (example: en-US)"""
+    movie_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('movie_id') }})
+    r"""Target movie ID; mandatory for movie streams (example: 550)"""
+    query: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('query') }})
+    r"""Search query; mandatory for search streams"""
+    SOURCE_TYPE: Final[Tmdb] = dataclasses.field(default=Tmdb.TMDB, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
diff --git a/src/airbyte_api/models/source_toggl.py b/src/airbyte_api/models/source_toggl.py
new file mode 100644
index 00000000..ad4eac4f
--- /dev/null
+++ b/src/airbyte_api/models/source_toggl.py
@@ -0,0 +1,30 @@
+"""Code generated by Speakeasy (https://speakeasy.com).
+DO NOT EDIT."""
+
+from __future__ import annotations
+import dataclasses
+from airbyte_api import utils
+from dataclasses_json import Undefined, dataclass_json
+from enum import Enum
+from typing import Final
+
+
+class Toggl(str, Enum):
+    TOGGL = 'toggl'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class SourceToggl:
+    api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }})
+    r"""Your API Token. See here. The token is case sensitive."""
+    end_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date') }})
+    r"""To retrieve time entries created before the given date (inclusive)."""
+    organization_id: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('organization_id') }})
+    r"""Your organization id. See here."""
+    start_date: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date') }})
+    r"""To retrieve time entries created after the given date (inclusive)."""
+    workspace_id: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workspace_id') }})
+    r"""Your workspace id.
See here.""" + SOURCE_TYPE: Final[Toggl] = dataclasses.field(default=Toggl.TOGGL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_twelve_data.py b/src/airbyte_api/models/source_twelve_data.py index 6ba37fe1..fd223e80 100644 --- a/src/airbyte_api/models/source_twelve_data.py +++ b/src/airbyte_api/models/source_twelve_data.py @@ -8,7 +8,7 @@ from typing import Final, Optional -class Interval(str, Enum): +class SourceTwelveDataInterval(str, Enum): r"""Between two consecutive points in time series Supports: 1min, 5min, 15min, 30min, 45min, 1h, 2h, 4h, 1day, 1week, 1month""" ONEMIN = '1min' FIVEMIN = '5min' @@ -35,7 +35,7 @@ class SourceTwelveData: r"""Where instrument is traded""" exchange: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('exchange'), 'exclude': lambda f: f is None }}) r"""Where instrument is traded""" - interval: Optional[Interval] = dataclasses.field(default=Interval.ONEDAY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('interval'), 'exclude': lambda f: f is None }}) + interval: Optional[SourceTwelveDataInterval] = dataclasses.field(default=SourceTwelveDataInterval.ONEDAY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('interval'), 'exclude': lambda f: f is None }}) r"""Between two consecutive points in time series Supports: 1min, 5min, 15min, 30min, 45min, 1h, 2h, 4h, 1day, 1week, 1month""" SOURCE_TYPE: Final[TwelveData] = dataclasses.field(default=TwelveData.TWELVE_DATA, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) symbol: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('symbol'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_tyntec_sms.py b/src/airbyte_api/models/source_tyntec_sms.py new file mode 100644 index 00000000..b80daabc --- /dev/null +++ b/src/airbyte_api/models/source_tyntec_sms.py @@ -0,0 +1,28 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional + + +class TyntecSms(str, Enum): + TYNTEC_SMS = 'tyntec-sms' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceTyntecSms: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Your Tyntec API Key. 
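A sketch of the new Toggl config and the renamed Twelve Data enum shown above (placeholder IDs; module paths as introduced in the hunks):

    from airbyte_api.models.source_toggl import SourceToggl
    from airbyte_api.models.source_twelve_data import SourceTwelveDataInterval

    toggl = SourceToggl(
        api_token='example-token',  # placeholder
        start_date='2024-01-01',    # entries created after this date (inclusive)
        end_date='2024-12-31',      # entries created before this date (inclusive)
        organization_id=1234567,    # placeholder
        workspace_id=7654321,       # placeholder
    )

    # Interval is now SourceTwelveDataInterval; update imports accordingly.
    interval = SourceTwelveDataInterval.ONEDAY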
See here""" + from_: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('from') }}) + r"""The phone number of the SMS message sender (international).""" + to: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('to') }}) + r"""The phone number of the SMS message recipient (international).""" + message: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('message'), 'exclude': lambda f: f is None }}) + r"""The content of the SMS message to be sent.""" + SOURCE_TYPE: Final[TyntecSms] = dataclasses.field(default=TyntecSms.TYNTEC_SMS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_uptick.py b/src/airbyte_api/models/source_uptick.py new file mode 100644 index 00000000..a49c46fb --- /dev/null +++ b/src/airbyte_api/models/source_uptick.py @@ -0,0 +1,35 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Uptick(str, Enum): + UPTICK = 'uptick' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceUptick: + base_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('base_url') }}) + r"""Ex: https://demo-fire.onuptick.com/""" + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + client_refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_refresh_token') }}) + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + end_date: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'exclude': lambda f: f is None }}) + r"""Fetch data up until this date""" + oauth_access_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('oauth_access_token'), 'exclude': lambda f: f is None }}) + r"""The current access token. This field might be overridden by the connector based on the token refresh endpoint response.""" + oauth_token_expiry_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('oauth_token_expiry_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) + r"""The date the current access token expires in. 
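Because 'from' is a Python keyword, the Tyntec model above exposes it as from_ and maps it back through the field metadata; a placeholder sketch:

    from airbyte_api.models.source_tyntec_sms import SourceTyntecSms

    sms = SourceTyntecSms(
        api_key='example-key',             # placeholder
        from_='+15550000001',              # sender, serialized as 'from'
        to='+15550000002',                 # recipient
        message='Hello from Airbyte',      # optional; omitted from JSON when None
    )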
This field might be overridden by the connector based on the token refresh endpoint response.""" + SOURCE_TYPE: Final[Uptick] = dataclasses.field(default=Uptick.UPTICK, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + start_date: Optional[str] = dataclasses.field(default='2025-01-01', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'exclude': lambda f: f is None }}) + r"""Fetch data starting from this date (by default 2025-01-01)""" + + diff --git a/src/airbyte_api/models/source_vitally.py b/src/airbyte_api/models/source_vitally.py index 63fde615..fa8c364a 100644 --- a/src/airbyte_api/models/source_vitally.py +++ b/src/airbyte_api/models/source_vitally.py @@ -5,7 +5,7 @@ from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json from enum import Enum -from typing import Final +from typing import Final, Optional class Vitally(str, Enum): @@ -22,10 +22,14 @@ class SourceVitallyStatus(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceVitally: - api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) - r"""The API Token for a Vitally account.""" + domain: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain') }}) + r"""Provide only the subdomain part, like https://{your-custom-subdomain}.rest.vitally.io/. Keep empty if you don't have a subdomain.""" + secret_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secret_token') }}) + r"""sk_live_secret_token""" status: SourceVitallyStatus = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('status') }}) r"""Status of the Vitally accounts. One of the following values; active, churned, activeOrChurned.""" + basic_auth_header: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('basic_auth_header'), 'exclude': lambda f: f is None }}) + r"""Basic Auth Header""" SOURCE_TYPE: Final[Vitally] = dataclasses.field(default=Vitally.VITALLY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_watchmode.py b/src/airbyte_api/models/source_watchmode.py new file mode 100644 index 00000000..c6fde416 --- /dev/null +++ b/src/airbyte_api/models/source_watchmode.py @@ -0,0 +1,27 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Watchmode(str, Enum): + WATCHMODE = 'watchmode' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceWatchmode: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Your API key for authenticating with the Watchmode API. 
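A placeholder sketch of the new Uptick config; per the docstrings above, the connector may rewrite the OAuth fields after a token refresh:

    from airbyte_api.models.source_uptick import SourceUptick

    uptick = SourceUptick(
        base_url='https://demo-fire.onuptick.com/',  # example from the docstring
        client_id='example-client-id',               # placeholder
        client_refresh_token='example-refresh-token',
        client_secret='example-client-secret',
        # start_date defaults to '2025-01-01' when omitted.
    )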
You can request a free API key at https://api.watchmode.com/requestApiKey/.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + search_val: Optional[str] = dataclasses.field(default='Terminator', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('search_val'), 'exclude': lambda f: f is None }}) + r"""The name value for search stream""" + SOURCE_TYPE: Final[Watchmode] = dataclasses.field(default=Watchmode.WATCHMODE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_workday.py b/src/airbyte_api/models/source_workday.py new file mode 100644 index 00000000..c4822039 --- /dev/null +++ b/src/airbyte_api/models/source_workday.py @@ -0,0 +1,60 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Any, Final, List, Optional, Union + + +class Rest(str, Enum): + REST = 'REST' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class RESTAPIStreams: + access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) + r"""Follow the instructions in the \\"OAuth 2.0 in Postman - API Client for Integrations\\" article in the Workday community docs to obtain access token.""" + AUTH_TYPE: Final[Rest] = dataclasses.field(default=Rest.REST, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }}) + start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) + r"""Rows after this date will be synced, default 2 years ago.""" + + + + +class Raas(str, Enum): + RAAS = 'RAAS' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class ReportBasedStreams: + password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }}) + report_ids: List[Any] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('report_ids') }}) + r"""Report IDs can be found by clicking the three dots on the right side of the report > Web Service > View URLs > in JSON url copy everything between Workday tenant/ and ?format=json.""" + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + AUTH_TYPE: Final[Raas] = dataclasses.field(default=Raas.RAAS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }}) + + + + +class Workday(str, Enum): + WORKDAY = 'workday' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceWorkday: + credentials: SourceWorkdayAuthentication = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }}) + r"""Report Based Streams and REST API Streams use different methods of Authentication. 
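A placeholder sketch for the Watchmode model defined above; search_val falls back to its generated default when omitted:

    from datetime import datetime, timezone
    from airbyte_api.models.source_watchmode import SourceWatchmode

    watchmode = SourceWatchmode(
        api_key='example-key',  # placeholder
        start_date=datetime(2024, 1, 1, tzinfo=timezone.utc),
    )
    assert watchmode.search_val == 'Terminator'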
Choose streams type you want to sync and provide needed credentials for them.""" + host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }}) + tenant_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tenant_id') }}) + SOURCE_TYPE: Final[Workday] = dataclasses.field(default=Workday.WORKDAY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + + +SourceWorkdayAuthentication = Union[ReportBasedStreams, RESTAPIStreams] diff --git a/src/airbyte_api/models/source_younium.py b/src/airbyte_api/models/source_younium.py new file mode 100644 index 00000000..03ec622e --- /dev/null +++ b/src/airbyte_api/models/source_younium.py @@ -0,0 +1,28 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional + + +class Younium(str, Enum): + YOUNIUM = 'younium' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceYounium: + legal_entity: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('legal_entity') }}) + r"""Legal Entity that data should be pulled from""" + password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }}) + r"""Account password for younium account API key""" + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + r"""Username for Younium account""" + playground: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('playground'), 'exclude': lambda f: f is None }}) + r"""Property defining if connector is used against playground or production environment""" + SOURCE_TYPE: Final[Younium] = dataclasses.field(default=Younium.YOUNIUM, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_yousign.py b/src/airbyte_api/models/source_yousign.py new file mode 100644 index 00000000..20b1098d --- /dev/null +++ b/src/airbyte_api/models/source_yousign.py @@ -0,0 +1,35 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
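The Workday credentials field is the SourceWorkdayAuthentication union defined above; passing one member or the other selects the AUTH_TYPE discriminator automatically. A placeholder sketch using the REST API variant:

    from airbyte_api.models.source_workday import RESTAPIStreams, SourceWorkday

    workday = SourceWorkday(
        credentials=RESTAPIStreams(access_token='example-access-token'),
        host='example.workday.com',  # placeholder host
        tenant_id='example-tenant',  # placeholder tenant
    )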
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Yousign(str, Enum): + YOUSIGN = 'yousign' + + +class SourceYousignSubdomain(str, Enum): + r"""The subdomain for the Yousign API environment, such as 'sandbox' or 'api'.""" + API_SANDBOX = 'api-sandbox' + API = 'api' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceYousign: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""API key or access token""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + limit: Optional[str] = dataclasses.field(default='10', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('limit'), 'exclude': lambda f: f is None }}) + r"""Limit for each response objects""" + SOURCE_TYPE: Final[Yousign] = dataclasses.field(default=Yousign.YOUSIGN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + subdomain: Optional[SourceYousignSubdomain] = dataclasses.field(default=SourceYousignSubdomain.API, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('subdomain'), 'exclude': lambda f: f is None }}) + r"""The subdomain for the Yousign API environment, such as 'sandbox' or 'api'.""" + + diff --git a/src/airbyte_api/models/source_zapsign.py b/src/airbyte_api/models/source_zapsign.py new file mode 100644 index 00000000..6ad97444 --- /dev/null +++ b/src/airbyte_api/models/source_zapsign.py @@ -0,0 +1,27 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Any, Final, List, Optional + + +class Zapsign(str, Enum): + ZAPSIGN = 'zapsign' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceZapsign: + api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }}) + r"""Your static API token for authentication. You can find it in your ZapSign account under the 'Settings' or 'API' section. 
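A placeholder sketch for the Yousign model above, selecting the sandbox environment; subdomain defaults to API when omitted:

    from datetime import datetime, timezone
    from airbyte_api.models.source_yousign import (
        SourceYousign,
        SourceYousignSubdomain,
    )

    yousign = SourceYousign(
        api_key='example-key',  # placeholder
        start_date=datetime(2024, 1, 1, tzinfo=timezone.utc),
        subdomain=SourceYousignSubdomain.API_SANDBOX,
    )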
For more details, refer to the [Getting Started](https://docs.zapsign.com.br/english/getting-started#how-do-i-get-my-api-token) guide.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + signer_ids: Optional[List[Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('signer_ids'), 'exclude': lambda f: f is None }}) + r"""The signer ids for signer stream""" + SOURCE_TYPE: Final[Zapsign] = dataclasses.field(default=Zapsign.ZAPSIGN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_zendesk_chat.py b/src/airbyte_api/models/source_zendesk_chat.py index c584be56..cc273b42 100644 --- a/src/airbyte_api/models/source_zendesk_chat.py +++ b/src/airbyte_api/models/source_zendesk_chat.py @@ -44,7 +44,7 @@ class SourceZendeskChatOAuth20: -class SourceZendeskChatZendeskChat(str, Enum): +class ZendeskChat(str, Enum): ZENDESK_CHAT = 'zendesk-chat' @@ -53,10 +53,10 @@ class SourceZendeskChatZendeskChat(str, Enum): class SourceZendeskChat: start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) r"""The date from which you'd like to replicate data for Zendesk Chat API, in the format YYYY-MM-DDT00:00:00Z.""" + subdomain: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('subdomain') }}) + r"""The unique subdomain of your Zendesk account (without https://). See the Zendesk docs to find your subdomain.""" credentials: Optional[SourceZendeskChatAuthorizationMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) - SOURCE_TYPE: Final[SourceZendeskChatZendeskChat] = dataclasses.field(default=SourceZendeskChatZendeskChat.ZENDESK_CHAT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) - subdomain: Optional[str] = dataclasses.field(default='', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('subdomain'), 'exclude': lambda f: f is None }}) - r"""The unique subdomain of your Zendesk account (without https://). See the Zendesk docs to find your subdomain""" + SOURCE_TYPE: Final[ZendeskChat] = dataclasses.field(default=ZendeskChat.ZENDESK_CHAT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/sourceconfiguration.py b/src/airbyte_api/models/sourceconfiguration.py index bad44051..70bca326 100644 --- a/src/airbyte_api/models/sourceconfiguration.py +++ b/src/airbyte_api/models/sourceconfiguration.py @@ -1,6 +1,7 @@ """Code generated by Speakeasy (https://speakeasy.com). 
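The Zendesk Chat change above is breaking for SDK callers: subdomain is now a required constructor argument rather than an Optional field defaulting to ''. A placeholder sketch of the new call shape:

    from datetime import datetime, timezone
    from airbyte_api.models.source_zendesk_chat import SourceZendeskChat

    chat = SourceZendeskChat(
        start_date=datetime(2021, 2, 1, tzinfo=timezone.utc),
        subdomain='example-subdomain',  # placeholder; required as of this change
    )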
DO NOT EDIT.""" from __future__ import annotations +from .source_100ms import Source100ms from .source_7shifts import Source7shifts from .source_activecampaign import SourceActivecampaign from .source_agilecrm import SourceAgilecrm @@ -10,6 +11,8 @@ from .source_airtable import SourceAirtable from .source_akeneo import SourceAkeneo from .source_algolia import SourceAlgolia +from .source_alpaca_broker_api import SourceAlpacaBrokerAPI +from .source_alpha_vantage import SourceAlphaVantage from .source_amazon_ads import SourceAmazonAds from .source_amazon_seller_partner import SourceAmazonSellerPartner from .source_amazon_sqs import SourceAmazonSqs @@ -19,13 +22,18 @@ from .source_appfigures import SourceAppfigures from .source_appfollow import SourceAppfollow from .source_apple_search_ads import SourceAppleSearchAds +from .source_appsflyer import SourceAppsflyer from .source_apptivo import SourceApptivo from .source_asana import SourceAsana from .source_ashby import SourceAshby +from .source_assemblyai import SourceAssemblyai from .source_auth0 import SourceAuth0 +from .source_aviationstack import SourceAviationstack +from .source_awin_advertiser import SourceAwinAdvertiser from .source_aws_cloudtrail import SourceAwsCloudtrail from .source_azure_blob_storage import SourceAzureBlobStorage from .source_azure_table import SourceAzureTable +from .source_babelforce import SourceBabelforce from .source_bamboo_hr import SourceBambooHr from .source_basecamp import SourceBasecamp from .source_beamer import SourceBeamer @@ -34,9 +42,12 @@ from .source_bing_ads import SourceBingAds from .source_bitly import SourceBitly from .source_blogger import SourceBlogger +from .source_bluetally import SourceBluetally +from .source_boldsign import SourceBoldsign from .source_box import SourceBox from .source_braintree import SourceBraintree from .source_braze import SourceBraze +from .source_breezometer import SourceBreezometer from .source_breezy_hr import SourceBreezyHr from .source_brevo import SourceBrevo from .source_brex import SourceBrex @@ -51,6 +62,7 @@ from .source_campayn import SourceCampayn from .source_canny import SourceCanny from .source_capsule_crm import SourceCapsuleCrm +from .source_captain_data import SourceCaptainData from .source_care_quality_commission import SourceCareQualityCommission from .source_cart import SourceCart from .source_castor_edc import SourceCastorEdc @@ -59,10 +71,12 @@ from .source_chargedesk import SourceChargedesk from .source_chargify import SourceChargify from .source_chartmogul import SourceChartmogul +from .source_churnkey import SourceChurnkey from .source_cimis import SourceCimis from .source_cin7 import SourceCin7 from .source_circa import SourceCirca from .source_circleci import SourceCircleci +from .source_cisco_meraki import SourceCiscoMeraki from .source_clarif_ai import SourceClarifAi from .source_clazar import SourceClazar from .source_clickhouse import SourceClickhouse @@ -75,6 +89,7 @@ from .source_coda import SourceCoda from .source_codefresh import SourceCodefresh from .source_coin_api import SourceCoinAPI +from .source_coingecko_coins import SourceCoingeckoCoins from .source_coinmarketcap import SourceCoinmarketcap from .source_concord import SourceConcord from .source_configcat import SourceConfigcat @@ -82,35 +97,48 @@ from .source_convertkit import SourceConvertkit from .source_convex import SourceConvex from .source_copper import SourceCopper +from .source_couchbase import SourceCouchbase from .source_countercyclical import SourceCountercyclical 
from .source_customer_io import SourceCustomerIo +from .source_customerly import SourceCustomerly from .source_datadog import SourceDatadog from .source_datascope import SourceDatascope from .source_dbt import SourceDbt from .source_delighted import SourceDelighted from .source_deputy import SourceDeputy +from .source_ding_connect import SourceDingConnect from .source_dixa import SourceDixa from .source_dockerhub import SourceDockerhub +from .source_docuseal import SourceDocuseal +from .source_dolibarr import SourceDolibarr from .source_dremio import SourceDremio from .source_drift import SourceDrift from .source_drip import SourceDrip from .source_dropbox_sign import SourceDropboxSign +from .source_dwolla import SourceDwolla from .source_dynamodb import SourceDynamodb from .source_e_conomic import SourceEConomic from .source_easypost import SourceEasypost from .source_easypromos import SourceEasypromos +from .source_ebay_finance import SourceEbayFinance +from .source_ebay_fulfillment import SourceEbayFulfillment from .source_elasticemail import SourceElasticemail +from .source_elasticsearch import SourceElasticsearch from .source_emailoctopus import SourceEmailoctopus from .source_employment_hero import SourceEmploymentHero from .source_encharge import SourceEncharge from .source_eventbrite import SourceEventbrite from .source_eventee import SourceEventee from .source_eventzilla import SourceEventzilla +from .source_everhour import SourceEverhour from .source_exchange_rates import SourceExchangeRates from .source_ezofficeinventory import SourceEzofficeinventory from .source_facebook_marketing import SourceFacebookMarketing +from .source_facebook_pages import SourceFacebookPages from .source_factorial import SourceFactorial from .source_faker import SourceFaker +from .source_fastbill import SourceFastbill +from .source_fastly import SourceFastly from .source_fauna import SourceFauna from .source_file import SourceFile from .source_fillout import SourceFillout @@ -136,10 +164,12 @@ from .source_freshservice import SourceFreshservice from .source_front import SourceFront from .source_fulcrum import SourceFulcrum +from .source_fullstory import SourceFullstory from .source_gainsight_px import SourceGainsightPx from .source_gcs import SourceGcs from .source_getgist import SourceGetgist from .source_getlago import SourceGetlago +from .source_giphy import SourceGiphy from .source_gitbook import SourceGitbook from .source_github import SourceGithub from .source_gitlab import SourceGitlab @@ -148,6 +178,7 @@ from .source_gnews import SourceGnews from .source_gocardless import SourceGocardless from .source_goldcast import SourceGoldcast +from .source_gologin import SourceGologin from .source_gong import SourceGong from .source_google_ads import SourceGoogleAds from .source_google_analytics_data_api import SourceGoogleAnalyticsDataAPI @@ -163,35 +194,48 @@ from .source_google_webfonts import SourceGoogleWebfonts from .source_gorgias import SourceGorgias from .source_greenhouse import SourceGreenhouse +from .source_greythr import SourceGreythr from .source_gridly import SourceGridly from .source_guru import SourceGuru from .source_gutendex import SourceGutendex from .source_hardcoded_records import SourceHardcodedRecords +from .source_harness import SourceHarness from .source_harvest import SourceHarvest from .source_height import SourceHeight +from .source_hellobaton import SourceHellobaton +from .source_help_scout import SourceHelpScout from .source_hibob import SourceHibob from .source_high_level 
import SourceHighLevel +from .source_hoorayhr import SourceHoorayhr from .source_hubplanner import SourceHubplanner from .source_hubspot import SourceHubspot +from .source_hugging_face_datasets import SourceHuggingFaceDatasets from .source_humanitix import SourceHumanitix +from .source_huntr import SourceHuntr from .source_illumina_basespace import SourceIlluminaBasespace +from .source_imagga import SourceImagga from .source_incident_io import SourceIncidentIo from .source_inflowinventory import SourceInflowinventory +from .source_insightful import SourceInsightful from .source_insightly import SourceInsightly from .source_instagram import SourceInstagram from .source_instatus import SourceInstatus from .source_intercom import SourceIntercom +from .source_intruder import SourceIntruder from .source_invoiced import SourceInvoiced from .source_invoiceninja import SourceInvoiceninja from .source_ip2whois import SourceIp2whois from .source_iterable import SourceIterable +from .source_jamf_pro import SourceJamfPro from .source_jira import SourceJira from .source_jobnimbus import SourceJobnimbus from .source_jotform import SourceJotform +from .source_judge_me_reviews import SourceJudgeMeReviews from .source_just_sift import SourceJustSift from .source_justcall import SourceJustcall from .source_k6_cloud import SourceK6Cloud from .source_katana import SourceKatana +from .source_keka import SourceKeka from .source_kisi import SourceKisi from .source_kissmetrics import SourceKissmetrics from .source_klarna import SourceKlarna @@ -204,6 +248,7 @@ from .source_less_annoying_crm import SourceLessAnnoyingCrm from .source_lever_hiring import SourceLeverHiring from .source_lightspeed_retail import SourceLightspeedRetail +from .source_linear import SourceLinear from .source_linkedin_ads import SourceLinkedinAds from .source_linkedin_pages import SourceLinkedinPages from .source_linnworks import SourceLinnworks @@ -213,6 +258,7 @@ from .source_luma import SourceLuma from .source_mailchimp import SourceMailchimp from .source_mailerlite import SourceMailerlite +from .source_mailersend import SourceMailersend from .source_mailgun import SourceMailgun from .source_mailjet_mail import SourceMailjetMail from .source_mailjet_sms import SourceMailjetSms @@ -220,7 +266,10 @@ from .source_mailtrap import SourceMailtrap from .source_marketo import SourceMarketo from .source_marketstack import SourceMarketstack +from .source_mendeley import SourceMendeley from .source_mention import SourceMention +from .source_mercado_ads import SourceMercadoAds +from .source_merge import SourceMerge from .source_metabase import SourceMetabase from .source_microsoft_dataverse import SourceMicrosoftDataverse from .source_microsoft_entra_id import SourceMicrosoftEntraID @@ -241,9 +290,15 @@ from .source_mysql import SourceMysql from .source_n8n import SourceN8n from .source_nasa import SourceNasa +from .source_navan import SourceNavan +from .source_nebius_ai import SourceNebiusAi from .source_netsuite import SourceNetsuite +from .source_netsuite_enterprise import SourceNetsuiteEnterprise from .source_news_api import SourceNewsAPI +from .source_newsdata import SourceNewsdata from .source_newsdata_io import SourceNewsdataIo +from .source_nexiopay import SourceNexiopay +from .source_ninjaone_rmm import SourceNinjaoneRmm from .source_nocrm import SourceNocrm from .source_northpass_lms import SourceNorthpassLms from .source_notion import SourceNotion @@ -257,31 +312,41 @@ from .source_onesignal import SourceOnesignal from .source_onfleet 
import SourceOnfleet from .source_open_data_dc import SourceOpenDataDc +from .source_open_exchange_rates import SourceOpenExchangeRates from .source_openaq import SourceOpenaq from .source_openfda import SourceOpenfda from .source_openweather import SourceOpenweather from .source_opinion_stage import SourceOpinionStage from .source_opsgenie import SourceOpsgenie +from .source_opuswatch import SourceOpuswatch from .source_oracle import SourceOracle +from .source_oracle_enterprise import SourceOracleEnterprise from .source_orb import SourceOrb -from .source_orbit import SourceOrbit from .source_oura import SourceOura from .source_outbrain_amplify import SourceOutbrainAmplify from .source_outreach import SourceOutreach from .source_oveit import SourceOveit from .source_pabbly_subscriptions_billing import SourcePabblySubscriptionsBilling +from .source_paddle import SourcePaddle +from .source_pagerduty import SourcePagerduty from .source_pandadoc import SourcePandadoc from .source_paperform import SourcePaperform from .source_papersign import SourcePapersign from .source_pardot import SourcePardot +from .source_partnerize import SourcePartnerize +from .source_partnerstack import SourcePartnerstack +from .source_payfit import SourcePayfit from .source_paypal_transaction import SourcePaypalTransaction from .source_paystack import SourcePaystack from .source_pendo import SourcePendo from .source_pennylane import SourcePennylane +from .source_perigon import SourcePerigon from .source_persistiq import SourcePersistiq from .source_persona import SourcePersona from .source_pexels_api import SourcePexelsAPI +from .source_phyllo import SourcePhyllo from .source_picqer import SourcePicqer +from .source_pingdom import SourcePingdom from .source_pinterest import SourcePinterest from .source_pipedrive import SourcePipedrive from .source_pipeliner import SourcePipeliner @@ -293,12 +358,14 @@ from .source_pocket import SourcePocket from .source_pokeapi import SourcePokeapi from .source_polygon_stock_api import SourcePolygonStockAPI +from .source_poplar import SourcePoplar from .source_postgres import SourcePostgres from .source_posthog import SourcePosthog from .source_postmarkapp import SourcePostmarkapp from .source_prestashop import SourcePrestashop from .source_pretix import SourcePretix from .source_primetric import SourcePrimetric +from .source_printify import SourcePrintify from .source_productboard import SourceProductboard from .source_productive import SourceProductive from .source_pypi import SourcePypi @@ -316,10 +383,13 @@ from .source_rentcast import SourceRentcast from .source_repairshopr import SourceRepairshopr from .source_reply_io import SourceReplyIo +from .source_retailexpress_by_maropost import SourceRetailexpressByMaropost from .source_retently import SourceRetently from .source_revenuecat import SourceRevenuecat from .source_revolut_merchant import SourceRevolutMerchant +from .source_ringcentral import SourceRingcentral from .source_rki_covid import SourceRkiCovid +from .source_rocket_chat import SourceRocketChat from .source_rocketlane import SourceRocketlane from .source_rollbar import SourceRollbar from .source_rootly import SourceRootly @@ -332,6 +402,7 @@ from .source_salesforce import SourceSalesforce from .source_salesloft import SourceSalesloft from .source_sap_fieldglass import SourceSapFieldglass +from .source_sap_hana_enterprise import SourceSapHanaEnterprise from .source_savvycal import SourceSavvycal from .source_scryfall import SourceScryfall from .source_secoda import 
SourceSecoda @@ -342,14 +413,21 @@ from .source_sendpulse import SourceSendpulse from .source_senseforce import SourceSenseforce from .source_sentry import SourceSentry +from .source_serpstat import SourceSerpstat +from .source_service_now import SourceServiceNow from .source_sftp import SourceSftp from .source_sftp_bulk import SourceSftpBulk +from .source_sharepoint_enterprise import SourceSharepointEnterprise from .source_sharetribe import SourceSharetribe from .source_shippo import SourceShippo +from .source_shipstation import SourceShipstation from .source_shopify import SourceShopify +from .source_shopwired import SourceShopwired from .source_shortcut import SourceShortcut from .source_shortio import SourceShortio +from .source_shutterstock import SourceShutterstock from .source_sigma_computing import SourceSigmaComputing +from .source_signnow import SourceSignnow from .source_simfin import SourceSimfin from .source_simplecast import SourceSimplecast from .source_simplesat import SourceSimplesat @@ -366,6 +444,7 @@ from .source_spacex_api import SourceSpacexAPI from .source_sparkpost import SourceSparkpost from .source_split_io import SourceSplitIo +from .source_spotify_ads import SourceSpotifyAds from .source_spotlercrm import SourceSpotlercrm from .source_square import SourceSquare from .source_squarespace import SourceSquarespace @@ -377,20 +456,26 @@ from .source_survey_sparrow import SourceSurveySparrow from .source_surveymonkey import SourceSurveymonkey from .source_survicate import SourceSurvicate +from .source_svix import SourceSvix from .source_systeme import SourceSysteme from .source_taboola import SourceTaboola +from .source_tavus import SourceTavus from .source_teamtailor import SourceTeamtailor from .source_teamwork import SourceTeamwork from .source_tempo import SourceTempo from .source_testrail import SourceTestrail from .source_the_guardian_api import SourceTheGuardianAPI from .source_thinkific import SourceThinkific +from .source_thinkific_courses import SourceThinkificCourses +from .source_thrive_learning import SourceThriveLearning from .source_ticketmaster import SourceTicketmaster from .source_tickettailor import SourceTickettailor from .source_tiktok_marketing import SourceTiktokMarketing from .source_timely import SourceTimely from .source_tinyemail import SourceTinyemail +from .source_tmdb import SourceTmdb from .source_todoist import SourceTodoist +from .source_toggl import SourceToggl from .source_track_pms import SourceTrackPms from .source_trello import SourceTrello from .source_tremendous import SourceTremendous @@ -400,10 +485,12 @@ from .source_twilio import SourceTwilio from .source_twilio_taskrouter import SourceTwilioTaskrouter from .source_twitter import SourceTwitter +from .source_tyntec_sms import SourceTyntecSms from .source_typeform import SourceTypeform from .source_ubidots import SourceUbidots from .source_unleash import SourceUnleash from .source_uppromote import SourceUppromote +from .source_uptick import SourceUptick from .source_us_census import SourceUsCensus from .source_uservoice import SourceUservoice from .source_vantage import SourceVantage @@ -414,6 +501,7 @@ from .source_vwo import SourceVwo from .source_waiteraid import SourceWaiteraid from .source_wasabi_stats_api import SourceWasabiStatsAPI +from .source_watchmode import SourceWatchmode from .source_weatherstack import SourceWeatherstack from .source_web_scrapper import SourceWebScrapper from .source_webflow import SourceWebflow @@ -423,6 +511,7 @@ from .source_woocommerce 
import SourceWoocommerce from .source_wordpress import SourceWordpress from .source_workable import SourceWorkable +from .source_workday import SourceWorkday from .source_workflowmax import SourceWorkflowmax from .source_workramp import SourceWorkramp from .source_wrike import SourceWrike @@ -433,9 +522,12 @@ from .source_yandex_metrica import SourceYandexMetrica from .source_yotpo import SourceYotpo from .source_you_need_a_budget_ynab import SourceYouNeedABudgetYnab +from .source_younium import SourceYounium +from .source_yousign import SourceYousign from .source_youtube_analytics import SourceYoutubeAnalytics from .source_youtube_data import SourceYoutubeData from .source_zapier_supported_storage import SourceZapierSupportedStorage +from .source_zapsign import SourceZapsign from .source_zendesk_chat import SourceZendeskChat from .source_zendesk_sunshine import SourceZendeskSunshine from .source_zendesk_support import SourceZendeskSupport @@ -456,4 +548,4 @@ from .source_zoom import SourceZoom from typing import Union -SourceConfiguration = Union[SourceAha, Source7shifts, SourceActivecampaign, SourceAgilecrm, SourceAirbyte, SourceAircall, SourceAirtable, SourceAkeneo, SourceAlgolia, SourceAmazonAds, SourceAmazonSellerPartner, SourceAmazonSqs, SourceAmplitude, SourceApifyDataset, SourceAppcues, SourceAppfigures, SourceAppfollow, SourceAppleSearchAds, SourceApptivo, SourceAsana, SourceAshby, SourceAuth0, SourceAwsCloudtrail, SourceAzureBlobStorage, SourceAzureTable, SourceBambooHr, SourceBasecamp, SourceBeamer, SourceBigmailer, SourceBigquery, SourceBingAds, SourceBitly, SourceBlogger, SourceBox, SourceBraintree, SourceBraze, SourceBreezyHr, SourceBrevo, SourceBrex, SourceBugsnag, SourceBuildkite, SourceBunnyInc, SourceBuzzsprout, SourceCalCom, SourceCalendly, SourceCallrail, SourceCampaignMonitor, SourceCampayn, SourceCanny, SourceCapsuleCrm, SourceCareQualityCommission, SourceCart, SourceCastorEdc, SourceChameleon, SourceChargebee, SourceChargedesk, SourceChargify, SourceChartmogul, SourceCimis, SourceCin7, SourceCirca, SourceCircleci, SourceClarifAi, SourceClazar, SourceClickhouse, SourceClickupAPI, SourceClockify, SourceClockodo, SourceCloseCom, SourceCloudbeds, SourceCoassemble, SourceCoda, SourceCodefresh, SourceCoinAPI, SourceCoinmarketcap, SourceConcord, SourceConfigcat, SourceConfluence, SourceConvertkit, SourceConvex, SourceCopper, SourceCountercyclical, SourceCustomerIo, SourceDatadog, SourceDatascope, SourceDbt, SourceDelighted, SourceDeputy, SourceDixa, SourceDockerhub, SourceDremio, SourceDrift, SourceDrip, SourceDropboxSign, SourceDynamodb, SourceEConomic, SourceEasypost, SourceEasypromos, SourceElasticemail, SourceEmailoctopus, SourceEmploymentHero, SourceEncharge, SourceEventbrite, SourceEventee, SourceEventzilla, SourceExchangeRates, SourceEzofficeinventory, SourceFacebookMarketing, SourceFactorial, SourceFaker, SourceFauna, SourceFile, SourceFillout, SourceFinage, SourceFinancialModelling, SourceFinnhub, SourceFinnworlds, SourceFirebolt, SourceFirehydrant, SourceFleetio, SourceFlexmail, SourceFlexport, SourceFloat, SourceFlowlu, SourceFormbricks, SourceFreeAgentConnector, SourceFreightview, SourceFreshbooks, SourceFreshcaller, SourceFreshchat, SourceFreshdesk, SourceFreshsales, SourceFreshservice, SourceFront, SourceFulcrum, SourceGainsightPx, SourceGcs, SourceGetgist, SourceGetlago, SourceGitbook, SourceGithub, SourceGitlab, SourceGlassfrog, SourceGmail, SourceGnews, SourceGocardless, SourceGoldcast, SourceGong, SourceGoogleAds, SourceGoogleAnalyticsDataAPI, 
SourceGoogleCalendar, SourceGoogleClassroom, SourceGoogleDirectory, SourceGoogleDrive, SourceGoogleForms, SourceGooglePagespeedInsights, SourceGoogleSearchConsole, SourceGoogleSheets, SourceGoogleTasks, SourceGoogleWebfonts, SourceGorgias, SourceGreenhouse, SourceGridly, SourceGuru, SourceGutendex, SourceHardcodedRecords, SourceHarvest, SourceHeight, SourceHibob, SourceHighLevel, SourceHubplanner, SourceHubspot, SourceHumanitix, SourceIlluminaBasespace, SourceIncidentIo, SourceInflowinventory, SourceInsightly, SourceInstagram, SourceInstatus, SourceIntercom, SourceInvoiced, SourceInvoiceninja, SourceIp2whois, SourceIterable, SourceJira, SourceJobnimbus, SourceJotform, SourceJustSift, SourceJustcall, SourceK6Cloud, SourceKatana, SourceKisi, SourceKissmetrics, SourceKlarna, SourceKlausAPI, SourceKlaviyo, SourceKyve, SourceLaunchdarkly, SourceLeadfeeder, SourceLemlist, SourceLessAnnoyingCrm, SourceLeverHiring, SourceLightspeedRetail, SourceLinkedinAds, SourceLinkedinPages, SourceLinnworks, SourceLob, SourceLokalise, SourceLooker, SourceLuma, SourceMailchimp, SourceMailerlite, SourceMailgun, SourceMailjetMail, SourceMailjetSms, SourceMailosaur, SourceMailtrap, SourceMarketo, SourceMarketstack, SourceMention, SourceMetabase, SourceMicrosoftDataverse, SourceMicrosoftEntraID, SourceMicrosoftLists, SourceMicrosoftOnedrive, SourceMicrosoftSharepoint, SourceMicrosoftTeams, SourceMiro, SourceMissive, SourceMixmax, SourceMixpanel, SourceMode, SourceMonday, SourceMongodbV2, SourceMssql, SourceMux, SourceMyHours, SourceMysql, SourceN8n, SourceNasa, SourceNetsuite, SourceNewsAPI, SourceNewsdataIo, SourceNocrm, SourceNorthpassLms, SourceNotion, SourceNutshell, SourceNylas, SourceNytimes, SourceOkta, SourceOmnisend, SourceOncehub, SourceOnepagecrm, SourceOnesignal, SourceOnfleet, SourceOpenDataDc, SourceOpenaq, SourceOpenfda, SourceOpenweather, SourceOpinionStage, SourceOpsgenie, SourceOracle, SourceOrb, SourceOrbit, SourceOura, SourceOutbrainAmplify, SourceOutreach, SourceOveit, SourcePabblySubscriptionsBilling, SourcePandadoc, SourcePaperform, SourcePapersign, SourcePardot, SourcePaypalTransaction, SourcePaystack, SourcePendo, SourcePennylane, SourcePersistiq, SourcePersona, SourcePexelsAPI, SourcePicqer, SourcePinterest, SourcePipedrive, SourcePipeliner, SourcePivotalTracker, SourcePiwik, SourcePlaid, SourcePlanhat, SourcePlausible, SourcePocket, SourcePokeapi, SourcePolygonStockAPI, SourcePostgres, SourcePosthog, SourcePostmarkapp, SourcePrestashop, SourcePretix, SourcePrimetric, SourceProductboard, SourceProductive, SourcePypi, SourceQualaroo, SourceQuickbooks, SourceRailz, SourceRdStationMarketing, SourceRecharge, SourceRecreation, SourceRecruitee, SourceRecurly, SourceReddit, SourceRedshift, SourceReferralhero, SourceRentcast, SourceRepairshopr, SourceReplyIo, SourceRetently, SourceRevenuecat, SourceRevolutMerchant, SourceRkiCovid, SourceRocketlane, SourceRollbar, SourceRootly, SourceRss, SourceRuddr, SourceS3, SourceSafetyculture, SourceSageHr, SourceSalesflare, SourceSalesforce, SourceSalesloft, SourceSapFieldglass, SourceSavvycal, SourceScryfall, SourceSecoda, SourceSegment, SourceSendgrid, SourceSendinblue, SourceSendowl, SourceSendpulse, SourceSenseforce, SourceSentry, SourceSftp, SourceSftpBulk, SourceSharetribe, SourceShippo, SourceShopify, SourceShortcut, SourceShortio, SourceSigmaComputing, SourceSimfin, SourceSimplecast, SourceSimplesat, SourceSlack, SourceSmaily, SourceSmartengage, SourceSmartreach, SourceSmartsheets, SourceSmartwaiver, SourceSnapchatMarketing, SourceSnowflake, 
SourceSolarwindsServiceDesk, SourceSonarCloud, SourceSpacexAPI, SourceSparkpost, SourceSplitIo, SourceSpotlercrm, SourceSquare, SourceSquarespace, SourceStatsig, SourceStatuspage, SourceStockdata, SourceStrava, SourceStripe, SourceSurveySparrow, SourceSurveymonkey, SourceSurvicate, SourceSysteme, SourceTaboola, SourceTeamtailor, SourceTeamwork, SourceTempo, SourceTestrail, SourceTheGuardianAPI, SourceThinkific, SourceTicketmaster, SourceTickettailor, SourceTiktokMarketing, SourceTimely, SourceTinyemail, SourceTodoist, SourceTrackPms, SourceTrello, SourceTremendous, SourceTrustpilot, SourceTvmazeSchedule, SourceTwelveData, SourceTwilio, SourceTwilioTaskrouter, SourceTwitter, SourceTypeform, SourceUbidots, SourceUnleash, SourceUppromote, SourceUsCensus, SourceUservoice, SourceVantage, SourceVeeqo, SourceVercel, SourceVismaEconomic, SourceVitally, SourceVwo, SourceWaiteraid, SourceWasabiStatsAPI, SourceWeatherstack, SourceWebScrapper, SourceWebflow, SourceWhenIWork, SourceWhiskyHunter, SourceWikipediaPageviews, SourceWoocommerce, SourceWordpress, SourceWorkable, SourceWorkflowmax, SourceWorkramp, SourceWrike, SourceWufoo, SourceXkcd, SourceXsolla, SourceYahooFinancePrice, SourceYandexMetrica, SourceYotpo, SourceYouNeedABudgetYnab, SourceYoutubeAnalytics, SourceYoutubeData, SourceZapierSupportedStorage, SourceZendeskChat, SourceZendeskSunshine, SourceZendeskSupport, SourceZendeskTalk, SourceZenefits, SourceZenloop, SourceZohoAnalyticsMetadataAPI, SourceZohoBigin, SourceZohoBilling, SourceZohoBooks, SourceZohoCampaign, SourceZohoCrm, SourceZohoDesk, SourceZohoExpense, SourceZohoInventory, SourceZohoInvoice, SourceZonkaFeedback, SourceZoom]
+SourceConfiguration = Union[SourceAha, Source100ms, Source7shifts, SourceActivecampaign, SourceAgilecrm, SourceAirbyte, SourceAircall, SourceAirtable, SourceAkeneo, SourceAlgolia, SourceAlpacaBrokerAPI, SourceAlphaVantage, SourceAmazonAds, SourceAmazonSellerPartner, SourceAmazonSqs, SourceAmplitude, SourceApifyDataset, SourceAppcues, SourceAppfigures, SourceAppfollow, SourceAppleSearchAds, SourceAppsflyer, SourceApptivo, SourceAsana, SourceAshby, SourceAssemblyai, SourceAuth0, SourceAviationstack, SourceAwinAdvertiser, SourceAwsCloudtrail, SourceAzureBlobStorage, SourceAzureTable, SourceBabelforce, SourceBambooHr, SourceBasecamp, SourceBeamer, SourceBigmailer, SourceBigquery, SourceBingAds, SourceBitly, SourceBlogger, SourceBluetally, SourceBoldsign, SourceBox, SourceBraintree, SourceBraze, SourceBreezometer, SourceBreezyHr, SourceBrevo, SourceBrex, SourceBugsnag, SourceBuildkite, SourceBunnyInc, SourceBuzzsprout, SourceCalCom, SourceCalendly, SourceCallrail, SourceCampaignMonitor, SourceCampayn, SourceCanny, SourceCapsuleCrm, SourceCaptainData, SourceCareQualityCommission, SourceCart, SourceCastorEdc, SourceChameleon, SourceChargebee, SourceChargedesk, SourceChargify, SourceChartmogul, SourceChurnkey, SourceCimis, SourceCin7, SourceCirca, SourceCircleci, SourceCiscoMeraki, SourceClarifAi, SourceClazar, SourceClickhouse, SourceClickupAPI, SourceClockify, SourceClockodo, SourceCloseCom, SourceCloudbeds, SourceCoassemble, SourceCoda, SourceCodefresh, SourceCoinAPI, SourceCoingeckoCoins, SourceCoinmarketcap, SourceConcord, SourceConfigcat, SourceConfluence, SourceConvertkit, SourceConvex, SourceCopper, SourceCouchbase, SourceCountercyclical, SourceCustomerIo, SourceCustomerly, SourceDatadog, SourceDatascope, SourceDbt, SourceDelighted, SourceDeputy, SourceDingConnect, SourceDixa, SourceDockerhub, SourceDocuseal, SourceDolibarr, SourceDremio, SourceDrift,
SourceDrip, SourceDropboxSign, SourceDwolla, SourceDynamodb, SourceEConomic, SourceEasypost, SourceEasypromos, SourceEbayFinance, SourceEbayFulfillment, SourceElasticemail, SourceElasticsearch, SourceEmailoctopus, SourceEmploymentHero, SourceEncharge, SourceEventbrite, SourceEventee, SourceEventzilla, SourceEverhour, SourceExchangeRates, SourceEzofficeinventory, SourceFacebookMarketing, SourceFacebookPages, SourceFactorial, SourceFaker, SourceFastbill, SourceFastly, SourceFauna, SourceFile, SourceFillout, SourceFinage, SourceFinancialModelling, SourceFinnhub, SourceFinnworlds, SourceFirebolt, SourceFirehydrant, SourceFleetio, SourceFlexmail, SourceFlexport, SourceFloat, SourceFlowlu, SourceFormbricks, SourceFreeAgentConnector, SourceFreightview, SourceFreshbooks, SourceFreshcaller, SourceFreshchat, SourceFreshdesk, SourceFreshsales, SourceFreshservice, SourceFront, SourceFulcrum, SourceFullstory, SourceGainsightPx, SourceGcs, SourceGetgist, SourceGetlago, SourceGiphy, SourceGitbook, SourceGithub, SourceGitlab, SourceGlassfrog, SourceGmail, SourceGnews, SourceGocardless, SourceGoldcast, SourceGologin, SourceGong, SourceGoogleAds, SourceGoogleAnalyticsDataAPI, SourceGoogleCalendar, SourceGoogleClassroom, SourceGoogleDirectory, SourceGoogleDrive, SourceGoogleForms, SourceGooglePagespeedInsights, SourceGoogleSearchConsole, SourceGoogleSheets, SourceGoogleTasks, SourceGoogleWebfonts, SourceGorgias, SourceGreenhouse, SourceGreythr, SourceGridly, SourceGuru, SourceGutendex, SourceHardcodedRecords, SourceHarness, SourceHarvest, SourceHeight, SourceHellobaton, SourceHelpScout, SourceHibob, SourceHighLevel, SourceHoorayhr, SourceHubplanner, SourceHubspot, SourceHuggingFaceDatasets, SourceHumanitix, SourceHuntr, SourceIlluminaBasespace, SourceImagga, SourceIncidentIo, SourceInflowinventory, SourceInsightful, SourceInsightly, SourceInstagram, SourceInstatus, SourceIntercom, SourceIntruder, SourceInvoiced, SourceInvoiceninja, SourceIp2whois, SourceIterable, SourceJamfPro, SourceJira, SourceJobnimbus, SourceJotform, SourceJudgeMeReviews, SourceJustSift, SourceJustcall, SourceK6Cloud, SourceKatana, SourceKeka, SourceKisi, SourceKissmetrics, SourceKlarna, SourceKlausAPI, SourceKlaviyo, SourceKyve, SourceLaunchdarkly, SourceLeadfeeder, SourceLemlist, SourceLessAnnoyingCrm, SourceLeverHiring, SourceLightspeedRetail, SourceLinear, SourceLinkedinAds, SourceLinkedinPages, SourceLinnworks, SourceLob, SourceLokalise, SourceLooker, SourceLuma, SourceMailchimp, SourceMailerlite, SourceMailersend, SourceMailgun, SourceMailjetMail, SourceMailjetSms, SourceMailosaur, SourceMailtrap, SourceMarketo, SourceMarketstack, SourceMendeley, SourceMention, SourceMercadoAds, SourceMerge, SourceMetabase, SourceMicrosoftDataverse, SourceMicrosoftEntraID, SourceMicrosoftLists, SourceMicrosoftOnedrive, SourceMicrosoftSharepoint, SourceMicrosoftTeams, SourceMiro, SourceMissive, SourceMixmax, SourceMixpanel, SourceMode, SourceMonday, SourceMongodbV2, SourceMssql, SourceMux, SourceMyHours, SourceMysql, SourceN8n, SourceNasa, SourceNavan, SourceNebiusAi, SourceNetsuite, SourceNetsuiteEnterprise, SourceNewsAPI, SourceNewsdata, SourceNewsdataIo, SourceNexiopay, SourceNinjaoneRmm, SourceNocrm, SourceNorthpassLms, SourceNotion, SourceNutshell, SourceNylas, SourceNytimes, SourceOkta, SourceOmnisend, SourceOncehub, SourceOnepagecrm, SourceOnesignal, SourceOnfleet, SourceOpenDataDc, SourceOpenExchangeRates, SourceOpenaq, SourceOpenfda, SourceOpenweather, SourceOpinionStage, SourceOpsgenie, SourceOpuswatch, SourceOracle, 
SourceOracleEnterprise, SourceOrb, SourceOura, SourceOutbrainAmplify, SourceOutreach, SourceOveit, SourcePabblySubscriptionsBilling, SourcePaddle, SourcePagerduty, SourcePandadoc, SourcePaperform, SourcePapersign, SourcePardot, SourcePartnerize, SourcePartnerstack, SourcePayfit, SourcePaypalTransaction, SourcePaystack, SourcePendo, SourcePennylane, SourcePerigon, SourcePersistiq, SourcePersona, SourcePexelsAPI, SourcePhyllo, SourcePicqer, SourcePingdom, SourcePinterest, SourcePipedrive, SourcePipeliner, SourcePivotalTracker, SourcePiwik, SourcePlaid, SourcePlanhat, SourcePlausible, SourcePocket, SourcePokeapi, SourcePolygonStockAPI, SourcePoplar, SourcePostgres, SourcePosthog, SourcePostmarkapp, SourcePrestashop, SourcePretix, SourcePrimetric, SourcePrintify, SourceProductboard, SourceProductive, SourcePypi, SourceQualaroo, SourceQuickbooks, SourceRailz, SourceRdStationMarketing, SourceRecharge, SourceRecreation, SourceRecruitee, SourceRecurly, SourceReddit, SourceRedshift, SourceReferralhero, SourceRentcast, SourceRepairshopr, SourceReplyIo, SourceRetailexpressByMaropost, SourceRetently, SourceRevenuecat, SourceRevolutMerchant, SourceRingcentral, SourceRkiCovid, SourceRocketChat, SourceRocketlane, SourceRollbar, SourceRootly, SourceRss, SourceRuddr, SourceS3, SourceSafetyculture, SourceSageHr, SourceSalesflare, SourceSalesforce, SourceSalesloft, SourceSapFieldglass, SourceSapHanaEnterprise, SourceSavvycal, SourceScryfall, SourceSecoda, SourceSegment, SourceSendgrid, SourceSendinblue, SourceSendowl, SourceSendpulse, SourceSenseforce, SourceSentry, SourceSerpstat, SourceServiceNow, SourceSftp, SourceSftpBulk, SourceSharepointEnterprise, SourceSharetribe, SourceShippo, SourceShipstation, SourceShopify, SourceShopwired, SourceShortcut, SourceShortio, SourceShutterstock, SourceSigmaComputing, SourceSignnow, SourceSimfin, SourceSimplecast, SourceSimplesat, SourceSlack, SourceSmaily, SourceSmartengage, SourceSmartreach, SourceSmartsheets, SourceSmartwaiver, SourceSnapchatMarketing, SourceSnowflake, SourceSolarwindsServiceDesk, SourceSonarCloud, SourceSpacexAPI, SourceSparkpost, SourceSplitIo, SourceSpotifyAds, SourceSpotlercrm, SourceSquare, SourceSquarespace, SourceStatsig, SourceStatuspage, SourceStockdata, SourceStrava, SourceStripe, SourceSurveySparrow, SourceSurveymonkey, SourceSurvicate, SourceSvix, SourceSysteme, SourceTaboola, SourceTavus, SourceTeamtailor, SourceTeamwork, SourceTempo, SourceTestrail, SourceTheGuardianAPI, SourceThinkific, SourceThinkificCourses, SourceThriveLearning, SourceTicketmaster, SourceTickettailor, SourceTiktokMarketing, SourceTimely, SourceTinyemail, SourceTmdb, SourceTodoist, SourceToggl, SourceTrackPms, SourceTrello, SourceTremendous, SourceTrustpilot, SourceTvmazeSchedule, SourceTwelveData, SourceTwilio, SourceTwilioTaskrouter, SourceTwitter, SourceTyntecSms, SourceTypeform, SourceUbidots, SourceUnleash, SourceUppromote, SourceUptick, SourceUsCensus, SourceUservoice, SourceVantage, SourceVeeqo, SourceVercel, SourceVismaEconomic, SourceVitally, SourceVwo, SourceWaiteraid, SourceWasabiStatsAPI, SourceWatchmode, SourceWeatherstack, SourceWebScrapper, SourceWebflow, SourceWhenIWork, SourceWhiskyHunter, SourceWikipediaPageviews, SourceWoocommerce, SourceWordpress, SourceWorkable, SourceWorkday, SourceWorkflowmax, SourceWorkramp, SourceWrike, SourceWufoo, SourceXkcd, SourceXsolla, SourceYahooFinancePrice, SourceYandexMetrica, SourceYotpo, SourceYouNeedABudgetYnab, SourceYounium, SourceYousign, SourceYoutubeAnalytics, SourceYoutubeData, 
SourceZapierSupportedStorage, SourceZapsign, SourceZendeskChat, SourceZendeskSunshine, SourceZendeskSupport, SourceZendeskTalk, SourceZenefits, SourceZenloop, SourceZohoAnalyticsMetadataAPI, SourceZohoBigin, SourceZohoBilling, SourceZohoBooks, SourceZohoCampaign, SourceZohoCrm, SourceZohoDesk, SourceZohoExpense, SourceZohoInventory, SourceZohoInvoice, SourceZonkaFeedback, SourceZoom]
diff --git a/src/airbyte_api/models/sourcecreaterequest.py b/src/airbyte_api/models/sourcecreaterequest.py
index fdeb9864..0dd3898e 100644
--- a/src/airbyte_api/models/sourcecreaterequest.py
+++ b/src/airbyte_api/models/sourcecreaterequest.py
@@ -2,6 +2,7 @@
 from __future__ import annotations
 import dataclasses
+from .scopedresourcerequirements import ScopedResourceRequirements
 from .sourceconfiguration import SourceConfiguration
 from airbyte_api import utils
 from dataclasses_json import Undefined, dataclass_json
@@ -18,6 +19,8 @@ class SourceCreateRequest:
     workspace_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workspaceId') }})
     definition_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('definitionId'), 'exclude': lambda f: f is None }})
     r"""The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided."""
+    resource_allocation: Optional[ScopedResourceRequirements] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('resourceAllocation'), 'exclude': lambda f: f is None }})
+    r"""Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level."""
     secret_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secretId'), 'exclude': lambda f: f is None }})
     r"""Optional secretID obtained through the OAuth redirect flow."""
diff --git a/src/airbyte_api/models/sourcepatchrequest.py b/src/airbyte_api/models/sourcepatchrequest.py
index 0c8b2f97..2a61f2db 100644
--- a/src/airbyte_api/models/sourcepatchrequest.py
+++ b/src/airbyte_api/models/sourcepatchrequest.py
@@ -2,6 +2,7 @@
 from __future__ import annotations
 import dataclasses
+from .scopedresourcerequirements import ScopedResourceRequirements
 from .sourceconfiguration import SourceConfiguration
 from airbyte_api import utils
 from dataclasses_json import Undefined, dataclass_json
@@ -14,6 +15,8 @@ class SourcePatchRequest:
     configuration: Optional[SourceConfiguration] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('configuration'), 'exclude': lambda f: f is None }})
     r"""The values required to configure the source."""
     name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name'), 'exclude': lambda f: f is None }})
+    resource_allocation: Optional[ScopedResourceRequirements] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('resourceAllocation'), 'exclude': lambda f: f is None }})
+    r"""Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level."""
     secret_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('secretId'), 'exclude': lambda f: f is None }})
     r"""Optional secretID obtained through the OAuth redirect flow."""
     workspace_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workspaceId'), 'exclude': lambda f: f is None }})
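The new resource_allocation field threads through the source create/patch/put request models and the response below. For orientation, a minimal sketch of pinning resource requirements at source creation; the ScopedResourceRequirements and ResourceRequirements field names are assumptions inferred from the docstring, not confirmed by this diff:

    import airbyte_api
    from airbyte_api import models

    sdk = airbyte_api.AirbyteAPI(security=models.Security(bearer_auth='<api-token>'))

    created = sdk.sources.create_source(request=models.SourceCreateRequest(
        name='faker-with-limits',
        workspace_id='00000000-0000-0000-0000-000000000000',  # hypothetical workspace ID
        configuration=models.SourceFaker(),
        # Assumed shape: a `default` block applying to all jobs for this actor,
        # overridden by job-type-specific entries and by connection-level config.
        resource_allocation=models.ScopedResourceRequirements(
            default=models.ResourceRequirements(cpu_limit='1', memory_limit='1Gi'),
        ),
    ))
    print(created.source_response.resource_allocation)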
diff --git a/src/airbyte_api/models/sourceputrequest.py b/src/airbyte_api/models/sourceputrequest.py
index 52a1a56c..eb61ca7e 100644
--- a/src/airbyte_api/models/sourceputrequest.py
+++ b/src/airbyte_api/models/sourceputrequest.py
@@ -2,9 +2,11 @@
 from __future__ import annotations
 import dataclasses
+from .scopedresourcerequirements import ScopedResourceRequirements
 from .sourceconfiguration import SourceConfiguration
 from airbyte_api import utils
 from dataclasses_json import Undefined, dataclass_json
+from typing import Optional
 
 
 @dataclass_json(undefined=Undefined.EXCLUDE)
@@ -13,5 +15,7 @@ class SourcePutRequest:
     configuration: SourceConfiguration = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('configuration') }})
     r"""The values required to configure the source."""
     name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }})
+    resource_allocation: Optional[ScopedResourceRequirements] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('resourceAllocation'), 'exclude': lambda f: f is None }})
+    r"""Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level."""
diff --git a/src/airbyte_api/models/sourceresponse.py b/src/airbyte_api/models/sourceresponse.py
index 36bb175b..275f9e6f 100644
--- a/src/airbyte_api/models/sourceresponse.py
+++ b/src/airbyte_api/models/sourceresponse.py
@@ -2,9 +2,11 @@
 from __future__ import annotations
 import dataclasses
+from .scopedresourcerequirements import ScopedResourceRequirements
 from .sourceconfiguration import SourceConfiguration
 from airbyte_api import utils
 from dataclasses_json import Undefined, dataclass_json
+from typing import Optional
 
 
 @dataclass_json(undefined=Undefined.EXCLUDE)
@@ -19,5 +21,7 @@ class SourceResponse:
     source_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceId') }})
     source_type: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }})
     workspace_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workspaceId') }})
+    resource_allocation: Optional[ScopedResourceRequirements] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('resourceAllocation'), 'exclude': lambda f: f is None }})
+    r"""Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level."""
diff --git a/src/airbyte_api/models/streamconfiguration.py b/src/airbyte_api/models/streamconfiguration.py
index 257e76a6..7811bab6 100644
--- a/src/airbyte_api/models/streamconfiguration.py
+++ b/src/airbyte_api/models/streamconfiguration.py
@@ -17,8 +17,14 @@ class StreamConfiguration:
     name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }})
     cursor_field: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cursorField'), 'exclude': lambda f: f is None }})
     r"""Path to the field that will be used to determine if a record is new or modified since the last sync. This field is REQUIRED if `sync_mode` is `incremental` unless there is a default."""
+    destination_object_name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationObjectName'), 'exclude': lambda f: f is None }})
+    r"""The name of the destination object that this stream will be written to, used for data activation destinations."""
+    include_files: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('includeFiles'), 'exclude': lambda f: f is None }})
+    r"""Whether to move raw files from the source to the destination during the sync."""
     mappers: Optional[List[ConfiguredStreamMapper]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mappers'), 'exclude': lambda f: f is None }})
     r"""Mappers that should be applied to the stream before writing to the destination."""
+    namespace: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('namespace'), 'exclude': lambda f: f is None }})
+    r"""Namespace of the stream."""
     primary_key: Optional[List[List[str]]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('primaryKey'), 'exclude': lambda f: f is None }})
     r"""Paths to the fields that will be used as primary key. This field is REQUIRED if `destination_sync_mode` is `*_dedup` unless it is already supplied by the source schema."""
     selected_fields: Optional[List[SelectedFieldInfo]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('selectedFields'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte_api/models/streamconfigurations_input.py b/src/airbyte_api/models/streamconfigurations_input.py
new file mode 100644
index 00000000..7855df66
--- /dev/null
+++ b/src/airbyte_api/models/streamconfigurations_input.py
@@ -0,0 +1,17 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+import dataclasses
+from .streamconfiguration import StreamConfiguration
+from airbyte_api import utils
+from dataclasses_json import Undefined, dataclass_json
+from typing import List, Optional
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class StreamConfigurationsInput:
+    r"""A list of configured stream options for a connection."""
+    streams: Optional[List[StreamConfiguration]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('streams'), 'exclude': lambda f: f is None }})
+
+
diff --git a/src/airbyte_api/models/streamproperties.py b/src/airbyte_api/models/streamproperties.py
index 8b1340cf..3a1e32df 100644
--- a/src/airbyte_api/models/streamproperties.py
+++ b/src/airbyte_api/models/streamproperties.py
@@ -17,6 +17,7 @@ class StreamProperties:
     source_defined_cursor_field: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceDefinedCursorField'), 'exclude': lambda f: f is None }})
     source_defined_primary_key: Optional[List[List[str]]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceDefinedPrimaryKey'), 'exclude': lambda f: f is None }})
     stream_name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('streamName'), 'exclude': lambda f: f is None }})
+    streamnamespace: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('streamnamespace'), 'exclude': lambda f: f is None }})
     sync_modes: Optional[List[ConnectionSyncModeEnum]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('syncModes'), 'exclude': lambda f: f is None }})
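A hedged sketch of the new per-stream knobs, using the StreamConfigurationsInput wrapper introduced above (presumably the write-side payload for connection create/patch; sync_mode and cursor_field are pre-existing fields shown for context):

    from airbyte_api import models

    stream = models.StreamConfiguration(
        name='invoices',
        namespace='billing',                   # namespace of the stream (new)
        include_files=False,                   # move raw files during the sync (new)
        destination_object_name='Invoice__c',  # data activation destinations only (new)
        sync_mode=models.ConnectionSyncModeEnum.INCREMENTAL_APPEND,
        cursor_field=['updated_at'],
    )
    streams = models.StreamConfigurationsInput(streams=[stream])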
diff --git a/src/airbyte_api/models/tag.py b/src/airbyte_api/models/tag.py
new file mode 100644
index 00000000..2973063d
--- /dev/null
+++ b/src/airbyte_api/models/tag.py
@@ -0,0 +1,18 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+import dataclasses
+from airbyte_api import utils
+from dataclasses_json import Undefined, dataclass_json
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class Tag:
+    r"""A tag that can be associated with a connection. Useful for grouping and organizing connections in a workspace."""
+    color: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('color') }})
+    name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }})
+    tag_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tagId') }})
+    workspace_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workspaceId') }})
+
+
diff --git a/src/airbyte_api/models/tagcreaterequest.py b/src/airbyte_api/models/tagcreaterequest.py
new file mode 100644
index 00000000..57c4ec78
--- /dev/null
+++ b/src/airbyte_api/models/tagcreaterequest.py
@@ -0,0 +1,16 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+import dataclasses
+from airbyte_api import utils
+from dataclasses_json import Undefined, dataclass_json
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class TagCreateRequest:
+    color: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('color') }})
+    name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }})
+    workspace_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workspaceId') }})
+
+
diff --git a/src/airbyte_api/models/tagpatchrequest.py b/src/airbyte_api/models/tagpatchrequest.py
new file mode 100644
index 00000000..e1391890
--- /dev/null
+++ b/src/airbyte_api/models/tagpatchrequest.py
@@ -0,0 +1,15 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+import dataclasses
+from airbyte_api import utils
+from dataclasses_json import Undefined, dataclass_json
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class TagPatchRequest:
+    color: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('color') }})
+    name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }})
+
+
diff --git a/src/airbyte_api/models/tagresponse.py b/src/airbyte_api/models/tagresponse.py
new file mode 100644
index 00000000..bf8aa6c7
--- /dev/null
+++ b/src/airbyte_api/models/tagresponse.py
@@ -0,0 +1,19 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+import dataclasses
+from airbyte_api import utils
+from dataclasses_json import Undefined, dataclass_json
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class TagResponse:
+    r"""Provides details of a single tag."""
+    color: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('color') }})
+    r"""A hexadecimal color value"""
+    name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }})
+    tag_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tagId') }})
+    workspace_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workspaceId') }})
+
+
diff --git a/src/airbyte_api/models/tagsresponse.py b/src/airbyte_api/models/tagsresponse.py
new file mode 100644
index 00000000..b333afe2
--- /dev/null
+++ b/src/airbyte_api/models/tagsresponse.py
@@ -0,0 +1,16 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+import dataclasses
+from .tagresponse import TagResponse
+from airbyte_api import utils
+from dataclasses_json import Undefined, dataclass_json
+from typing import List
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class TagsResponse:
+    data: List[TagResponse] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data') }})
+
+
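Taken together, these models back the new tag endpoints surfaced on the client as sdk.tags. A sketch of the expected round trip; the method names (create_tag, list_tags) and the response accessor names are assumptions based on the usual operation-ID conventions and the docs files in this release:

    import airbyte_api
    from airbyte_api import api, models

    sdk = airbyte_api.AirbyteAPI(security=models.Security(bearer_auth='<api-token>'))

    created = sdk.tags.create_tag(request=models.TagCreateRequest(
        name='prod-critical',
        color='#FF5733',  # a hexadecimal color value
        workspace_id='00000000-0000-0000-0000-000000000000',  # hypothetical
    ))
    listed = sdk.tags.list_tags(request=api.ListTagsRequest())
    for tag in listed.tags_response.data:
        print(tag.tag_id, tag.name, tag.color)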
diff --git a/src/airbyte_api/models/updatedeclarativesourcedefinitionrequest.py b/src/airbyte_api/models/updatedeclarativesourcedefinitionrequest.py
new file mode 100644
index 00000000..5d7f6e15
--- /dev/null
+++ b/src/airbyte_api/models/updatedeclarativesourcedefinitionrequest.py
@@ -0,0 +1,16 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+import dataclasses
+from airbyte_api import utils
+from dataclasses_json import Undefined, dataclass_json
+from typing import Any
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class UpdateDeclarativeSourceDefinitionRequest:
+    manifest: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('manifest') }})
+    r"""Low code CDK manifest JSON object"""
+
+
diff --git a/src/airbyte_api/models/updatedefinitionrequest.py b/src/airbyte_api/models/updatedefinitionrequest.py
new file mode 100644
index 00000000..6687f685
--- /dev/null
+++ b/src/airbyte_api/models/updatedefinitionrequest.py
@@ -0,0 +1,15 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+import dataclasses
+from airbyte_api import utils
+from dataclasses_json import Undefined, dataclass_json
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class UpdateDefinitionRequest:
+    docker_image_tag: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dockerImageTag') }})
+    name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }})
+
+
diff --git a/src/airbyte_api/models/webhooknotificationconfig.py b/src/airbyte_api/models/webhooknotificationconfig.py
new file mode 100644
index 00000000..ed1e1221
--- /dev/null
+++ b/src/airbyte_api/models/webhooknotificationconfig.py
@@ -0,0 +1,17 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+import dataclasses
+from airbyte_api import utils
+from dataclasses_json import Undefined, dataclass_json
+from typing import Optional
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class WebhookNotificationConfig:
+    r"""Configures a webhook notification."""
+    enabled: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('enabled'), 'exclude': lambda f: f is None }})
+    url: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url'), 'exclude': lambda f: f is None }})
+
+
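WebhookNotificationConfig is the leaf of the NotificationsConfig tree consumed by the workspace models below. A sketch under stated assumptions: only WebhookNotificationConfig is confirmed by this diff, while the grouping of per-event NotificationConfig entries is an assumed nesting to verify against notificationsconfig.py in the generated models:

    from airbyte_api import models

    hook = models.WebhookNotificationConfig(
        enabled=True,
        url='https://hooks.example.com/airbyte',  # hypothetical endpoint
    )
    # Assumed nesting: one NotificationConfig per event type, each with a webhook block.
    notifications = models.NotificationsConfig(
        failure=models.NotificationConfig(webhook=hook),
    )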
diff --git a/src/airbyte_api/models/workspacecreaterequest.py b/src/airbyte_api/models/workspacecreaterequest.py
index 8a3e6dc4..60538c6e 100644
--- a/src/airbyte_api/models/workspacecreaterequest.py
+++ b/src/airbyte_api/models/workspacecreaterequest.py
@@ -2,6 +2,7 @@
 from __future__ import annotations
 import dataclasses
+from .notificationsconfig import NotificationsConfig
 from airbyte_api import utils
 from dataclasses_json import Undefined, dataclass_json
 from typing import Optional
@@ -12,7 +13,10 @@ class WorkspaceCreateRequest:
     name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }})
     r"""Name of the workspace"""
+    notifications: Optional[NotificationsConfig] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('notifications'), 'exclude': lambda f: f is None }})
+    r"""Configures workspace notifications."""
     organization_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('organizationId'), 'exclude': lambda f: f is None }})
     r"""ID of organization to add workspace to."""
+    region_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('regionId'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte_api/models/workspaceoauthcredentialsrequest.py b/src/airbyte_api/models/workspaceoauthcredentialsrequest.py
index 9f52108d..a3e32043 100644
--- a/src/airbyte_api/models/workspaceoauthcredentialsrequest.py
+++ b/src/airbyte_api/models/workspaceoauthcredentialsrequest.py
@@ -4,9 +4,9 @@
 import dataclasses
 from .actortypeenum import ActorTypeEnum
 from .oauthactornames import OAuthActorNames
-from .oauthcredentialsconfiguration import OAuthCredentialsConfiguration
 from airbyte_api import utils
 from dataclasses_json import Undefined, dataclass_json
+from typing import Any
 
 
 @dataclass_json(undefined=Undefined.EXCLUDE)
@@ -15,7 +15,7 @@ class WorkspaceOAuthCredentialsRequest:
     r"""POST body for creating/updating workspace level OAuth credentials"""
     actor_type: ActorTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('actorType') }})
     r"""Whether you're setting this override for a source or destination"""
-    configuration: OAuthCredentialsConfiguration = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('configuration') }})
+    configuration: Any = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('configuration') }})
     r"""The values required to configure the source."""
     name: OAuthActorNames = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }})
diff --git a/src/airbyte_api/models/workspaceresponse.py b/src/airbyte_api/models/workspaceresponse.py
index 781615d2..7d6ccecc 100644
--- a/src/airbyte_api/models/workspaceresponse.py
+++ b/src/airbyte_api/models/workspaceresponse.py
@@ -2,18 +2,19 @@
 from __future__ import annotations
 import dataclasses
-from .geographyenum import GeographyEnum
+from .notificationsconfig import NotificationsConfig
 from airbyte_api import utils
 from dataclasses_json import Undefined, dataclass_json
-from typing import Optional
 
 
 @dataclass_json(undefined=Undefined.EXCLUDE)
 @dataclasses.dataclass
 class WorkspaceResponse:
     r"""Provides details of a single workspace."""
+    data_residency: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataResidency') }})
     name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }})
+    notifications: NotificationsConfig = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('notifications') }})
+    r"""Configures workspace notifications."""
     workspace_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('workspaceId') }})
-    data_residency: Optional[GeographyEnum] = dataclasses.field(default=GeographyEnum.AUTO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataResidency'), 'exclude': lambda f: f is None }})
diff --git a/src/airbyte_api/models/workspaceupdaterequest.py b/src/airbyte_api/models/workspaceupdaterequest.py
index a2ab0b89..a6d14f8c 100644
--- a/src/airbyte_api/models/workspaceupdaterequest.py
+++ b/src/airbyte_api/models/workspaceupdaterequest.py
@@ -2,14 +2,19 @@
 from __future__ import annotations
 import dataclasses
+from .notificationsconfig import NotificationsConfig
 from airbyte_api import utils
 from dataclasses_json import Undefined, dataclass_json
+from typing import Optional
 
 
 @dataclass_json(undefined=Undefined.EXCLUDE)
 @dataclasses.dataclass
 class WorkspaceUpdateRequest:
-    name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }})
+    name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name'), 'exclude': lambda f: f is None }})
     r"""Name of the workspace"""
+    notifications: Optional[NotificationsConfig] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('notifications'), 'exclude': lambda f: f is None }})
+    r"""Configures workspace notifications."""
+    region_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('regionId'), 'exclude': lambda f: f is None }})
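With name now optional, partial workspace updates become possible. A minimal sketch moving a workspace to a different region without renaming it; the UpdateWorkspaceRequest wrapper field follows the generator's usual naming, and the IDs are hypothetical:

    import airbyte_api
    from airbyte_api import api, models

    sdk = airbyte_api.AirbyteAPI(security=models.Security(bearer_auth='<api-token>'))

    res = sdk.workspaces.update_workspace(request=api.UpdateWorkspaceRequest(
        workspace_id='00000000-0000-0000-0000-000000000000',
        workspace_update_request=models.WorkspaceUpdateRequest(
            region_id='11111111-1111-1111-1111-111111111111',  # hypothetical region ID
        ),
    ))
    # data_residency is now a plain string on the response rather than a GeographyEnum.
    print(res.workspace_response.data_residency)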
diff --git a/src/airbyte_api/organizations.py b/src/airbyte_api/organizations.py
index af2c4393..76e1349c 100644
--- a/src/airbyte_api/organizations.py
+++ b/src/airbyte_api/organizations.py
@@ -14,6 +14,65 @@ def __init__(self, sdk_config: SDKConfiguration) -> None:
 
 
+    def create_or_update_organization_o_auth_credentials(self, request: api.CreateOrUpdateOrganizationOAuthCredentialsRequest) -> api.CreateOrUpdateOrganizationOAuthCredentialsResponse:
+        r"""Create OAuth override credentials for an organization and source type.
+        Create/update a set of OAuth credentials to override the Airbyte-provided OAuth credentials used for source/destination OAuth.
+        In order to determine what the credential configuration needs to be, please see the connector specification of the relevant source/destination.
+        """
+        hook_ctx = HookContext(operation_id='createOrUpdateOrganizationOAuthCredentials', oauth2_scopes=[], security_source=self.sdk_configuration.security)
+        base_url = utils.template_url(*self.sdk_configuration.get_server_details())
+
+        url = utils.generate_url(base_url, '/organizations/{organizationId}/oauthCredentials', request)
+
+        if callable(self.sdk_configuration.security):
+            headers, query_params = utils.get_security(self.sdk_configuration.security())
+        else:
+            headers, query_params = utils.get_security(self.sdk_configuration.security)
+
+        req_content_type, data, form = utils.serialize_request_body(request, api.CreateOrUpdateOrganizationOAuthCredentialsRequest, "organization_o_auth_credentials_request", False, False, 'json')
+        if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'):
+            headers['content-type'] = req_content_type
+        if data is None and form is None:
+            raise Exception('request body is required')
+        headers['Accept'] = '*/*'
+        headers['user-agent'] = self.sdk_configuration.user_agent
+        client = self.sdk_configuration.client
+
+        try:
+            req = client.prepare_request(requests_http.Request('PUT', url, params=query_params, data=data, files=form, headers=headers))
+            req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req)
+            http_res = client.send(req)
+        except Exception as e:
+            _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e)
+            if e is not None:
+                raise e
+
+        if utils.match_status_codes(['400','403','4XX','5XX'], http_res.status_code):
+            result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None)
+            if e is not None:
+                raise e
+            if result is not None:
+                http_res = result
+        else:
+            http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res)
+
+
+        res = api.CreateOrUpdateOrganizationOAuthCredentialsResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res)
+
+        if http_res.status_code == 200:
+            pass
+        elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        else:
+            raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
+
+        return res
+
+
     def list_organizations_for_user(self) -> api.ListOrganizationsForUserResponse:
         r"""List all organizations for a user
         Lists users organizations.
@@ -62,7 +121,9 @@ def list_organizations_for_user(self) -> api.ListOrganizationsForUserResponse:
             else:
                 content_type = http_res.headers.get('Content-Type')
                 raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
-        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
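A sketch of the new organization-level OAuth override, mirroring the workspace-level endpoint. The organization_o_auth_credentials_request wrapper name comes from the serialize call above; the body model name and enum members are assumptions to verify against the generated models:

    import airbyte_api
    from airbyte_api import api, models

    sdk = airbyte_api.AirbyteAPI(security=models.Security(bearer_auth='<api-token>'))

    res = sdk.organizations.create_or_update_organization_o_auth_credentials(
        request=api.CreateOrUpdateOrganizationOAuthCredentialsRequest(
            organization_id='00000000-0000-0000-0000-000000000000',  # hypothetical
            organization_o_auth_credentials_request=models.OrganizationOAuthCredentialsRequest(  # assumed model name
                actor_type=models.ActorTypeEnum.SOURCE,  # source or destination
                name=models.OAuthActorNames.GITHUB,      # assumed enum member
                # Per the connector spec; the workspace-level request now types this as Any.
                configuration={'client_id': '<client-id>', 'client_secret': '<client-secret>'},
            ),
        ),
    )
    assert res.status_code == 200  # the endpoint returns an empty 200 body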
diff --git a/src/airbyte_api/permissions.py b/src/airbyte_api/permissions.py
index 0525dbb2..6b9fca7a 100644
--- a/src/airbyte_api/permissions.py
+++ b/src/airbyte_api/permissions.py
@@ -65,7 +65,9 @@ def create_permission(self, request: models.PermissionCreateRequest) -> api.CreatePermissionResponse:
             else:
                 content_type = http_res.headers.get('Content-Type')
                 raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
-        elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
@@ -114,7 +116,9 @@ def delete_permission(self, request: api.DeletePermissionRequest) -> api.DeletePermissionResponse:
 
         if http_res.status_code == 204:
             pass
-        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code == 422 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code == 422 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
@@ -169,7 +173,9 @@ def get_permission(self, request: api.GetPermissionRequest) -> api.GetPermissionResponse:
             else:
                 content_type = http_res.headers.get('Content-Type')
                 raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
-        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code == 422 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code == 422 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
@@ -225,7 +231,9 @@ def list_permissions(self, request: api.ListPermissionsRequest) -> api.ListPermissionsResponse:
             else:
                 content_type = http_res.headers.get('Content-Type')
                 raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
-        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
@@ -285,7 +293,9 @@ def update_permission(self, request: api.UpdatePermissionRequest) -> api.UpdatePermissionResponse:
             else:
                 content_type = http_res.headers.get('Content-Type')
                 raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
-        elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code == 422 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code == 422 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
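The regenerated branches split the 4XX and 5XX ranges, but both still raise errors.SDKError, so caller-side handling is unchanged. A minimal sketch around one of the permissions calls:

    import airbyte_api
    from airbyte_api import api, errors, models

    sdk = airbyte_api.AirbyteAPI(security=models.Security(bearer_auth='<api-token>'))

    try:
        sdk.permissions.get_permission(request=api.GetPermissionRequest(
            permission_id='00000000-0000-0000-0000-000000000000',  # hypothetical
        ))
    except errors.SDKError as err:
        # Client (4XX) and server (5XX) failures both land here; the status
        # code distinguishes which branch raised.
        print(err.status_code, err.message)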
diff --git a/src/airbyte_api/sdk.py b/src/airbyte_api/sdk.py
index 781399a4..d17dc38a 100644
--- a/src/airbyte_api/sdk.py
+++ b/src/airbyte_api/sdk.py
@@ -2,14 +2,18 @@
 import requests as requests_http
 from .connections import Connections
+from .declarativesourcedefinitions import DeclarativeSourceDefinitions
+from .destinationdefinitions import DestinationDefinitions
 from .destinations import Destinations
 from .health import Health
 from .jobs import Jobs
 from .organizations import Organizations
 from .permissions import Permissions
 from .sdkconfiguration import SDKConfiguration
+from .sourcedefinitions import SourceDefinitions
 from .sources import Sources
 from .streams import Streams
+from .tags import Tags
 from .users import Users
 from .utils.retries import RetryConfig
 from .workspaces import Workspaces
@@ -27,8 +31,12 @@ class AirbyteAPI:
     permissions: Permissions
     sources: Sources
     streams: Streams
+    tags: Tags
     users: Users
     workspaces: Workspaces
+    declarative_source_definitions: DeclarativeSourceDefinitions
+    destination_definitions: DestinationDefinitions
+    source_definitions: SourceDefinitions
 
     sdk_configuration: SDKConfiguration
@@ -93,5 +101,9 @@ def _init_sdks(self):
         self.permissions = Permissions(self.sdk_configuration)
         self.sources = Sources(self.sdk_configuration)
         self.streams = Streams(self.sdk_configuration)
+        self.tags = Tags(self.sdk_configuration)
         self.users = Users(self.sdk_configuration)
         self.workspaces = Workspaces(self.sdk_configuration)
+        self.declarative_source_definitions = DeclarativeSourceDefinitions(self.sdk_configuration)
+        self.destination_definitions = DestinationDefinitions(self.sdk_configuration)
+        self.source_definitions = SourceDefinitions(self.sdk_configuration)
diff --git a/src/airbyte_api/sdkconfiguration.py b/src/airbyte_api/sdkconfiguration.py
index 647a6e50..89b8fe98 100644
--- a/src/airbyte_api/sdkconfiguration.py
+++ b/src/airbyte_api/sdkconfiguration.py
@@ -24,9 +24,9 @@ class SDKConfiguration:
     server_idx: Optional[int] = 0
     language: str = 'python'
     openapi_doc_version: str = '1.0.0'
-    sdk_version: str = '0.52.2'
-    gen_version: str = '2.474.15'
-    user_agent: str = 'speakeasy-sdk/python 0.52.2 2.474.15 1.0.0 airbyte-api'
+    sdk_version: str = '0.53.0'
+    gen_version: str = '2.536.0'
+    user_agent: str = 'speakeasy-sdk/python 0.53.0 2.536.0 1.0.0 airbyte-api'
     retry_config: Optional[RetryConfig] = None
 
     def __post_init__(self):
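After this change the four new operation groups hang directly off the client; nothing about construction changes:

    import airbyte_api
    from airbyte_api import models

    sdk = airbyte_api.AirbyteAPI(security=models.Security(bearer_auth='<api-token>'))

    sdk.tags                            # tag CRUD
    sdk.declarative_source_definitions  # low-code CDK manifests
    sdk.destination_definitions         # workspace-scoped destination definitions
    sdk.source_definitions              # workspace-scoped source definitions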
diff --git a/src/airbyte_api/sourcedefinitions.py b/src/airbyte_api/sourcedefinitions.py
new file mode 100644
index 00000000..70bcb27e
--- /dev/null
+++ b/src/airbyte_api/sourcedefinitions.py
@@ -0,0 +1,311 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+import requests as requests_http
+from .sdkconfiguration import SDKConfiguration
+from airbyte_api import api, errors, models, utils
+from airbyte_api._hooks import AfterErrorContext, AfterSuccessContext, BeforeRequestContext, HookContext
+from typing import Optional
+
+class SourceDefinitions:
+    sdk_configuration: SDKConfiguration
+
+    def __init__(self, sdk_config: SDKConfiguration) -> None:
+        self.sdk_configuration = sdk_config
+
+
+    def create_source_definition(self, request: api.CreateSourceDefinitionRequest) -> api.CreateSourceDefinitionResponse:
+        r"""Create a source definition."""
+        hook_ctx = HookContext(operation_id='createSourceDefinition', oauth2_scopes=[], security_source=self.sdk_configuration.security)
+        base_url = utils.template_url(*self.sdk_configuration.get_server_details())
+
+        url = utils.generate_url(base_url, '/workspaces/{workspaceId}/definitions/sources', request)
+
+        if callable(self.sdk_configuration.security):
+            headers, query_params = utils.get_security(self.sdk_configuration.security())
+        else:
+            headers, query_params = utils.get_security(self.sdk_configuration.security)
+
+        req_content_type, data, form = utils.serialize_request_body(request, api.CreateSourceDefinitionRequest, "create_definition_request", False, False, 'json')
+        if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'):
+            headers['content-type'] = req_content_type
+        if data is None and form is None:
+            raise Exception('request body is required')
+        headers['Accept'] = 'application/json'
+        headers['user-agent'] = self.sdk_configuration.user_agent
+        client = self.sdk_configuration.client
+
+        try:
+            req = client.prepare_request(requests_http.Request('POST', url, params=query_params, data=data, files=form, headers=headers))
+            req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req)
+            http_res = client.send(req)
+        except Exception as e:
+            _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e)
+            if e is not None:
+                raise e
+
+        if utils.match_status_codes(['4XX','5XX'], http_res.status_code):
+            result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None)
+            if e is not None:
+                raise e
+            if result is not None:
+                http_res = result
+        else:
+            http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res)
+
+
+        res = api.CreateSourceDefinitionResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res)
+
+        if http_res.status_code == 200:
+            # pylint: disable=no-else-return
+            if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'):
+                out = utils.unmarshal_json(http_res.text, Optional[models.DefinitionResponse])
+                res.definition_response = out
+            else:
+                content_type = http_res.headers.get('Content-Type')
+                raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        else:
+            raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
+
+        return res
+
+
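A sketch of registering a workspace-scoped definition through this new group. The create_definition_request wrapper name is taken from the serialize call above, while the CreateDefinitionRequest field names (mirroring UpdateDefinitionRequest plus a Docker repository) are assumptions to verify against the generated model:

    import airbyte_api
    from airbyte_api import api, models

    sdk = airbyte_api.AirbyteAPI(security=models.Security(bearer_auth='<api-token>'))

    res = sdk.source_definitions.create_source_definition(
        request=api.CreateSourceDefinitionRequest(
            workspace_id='00000000-0000-0000-0000-000000000000',  # hypothetical
            create_definition_request=models.CreateDefinitionRequest(
                name='source-custom',
                docker_repository='example/source-custom',  # assumed field
                docker_image_tag='0.1.0',
            ),
        ),
    )
    print(res.definition_response)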
delete_source_definition(self, request: api.DeleteSourceDefinitionRequest) -> api.DeleteSourceDefinitionResponse:
+        r"""Delete a source definition."""
+        hook_ctx = HookContext(operation_id='deleteSourceDefinition', oauth2_scopes=[], security_source=self.sdk_configuration.security)
+        base_url = utils.template_url(*self.sdk_configuration.get_server_details())
+
+        url = utils.generate_url(base_url, '/workspaces/{workspaceId}/definitions/sources/{definitionId}', request)
+
+        if callable(self.sdk_configuration.security):
+            headers, query_params = utils.get_security(self.sdk_configuration.security())
+        else:
+            headers, query_params = utils.get_security(self.sdk_configuration.security)
+
+        headers['Accept'] = 'application/json'
+        headers['user-agent'] = self.sdk_configuration.user_agent
+        client = self.sdk_configuration.client
+
+        try:
+            req = client.prepare_request(requests_http.Request('DELETE', url, params=query_params, headers=headers))
+            req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req)
+            http_res = client.send(req)
+        except Exception as e:
+            _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e)
+            if e is not None:
+                raise e
+
+        if utils.match_status_codes(['403','404','4XX','5XX'], http_res.status_code):
+            result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None)
+            if e is not None:
+                raise e
+            if result is not None:
+                http_res = result
+        else:
+            http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res)
+
+
+
+        res = api.DeleteSourceDefinitionResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res)
+
+        if http_res.status_code == 200:
+            # pylint: disable=no-else-return
+            if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'):
+                out = utils.unmarshal_json(http_res.text, Optional[models.DefinitionResponse])
+                res.definition_response = out
+            else:
+                content_type = http_res.headers.get('Content-Type')
+                raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        else:
+            raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
+
+        return res
+
+
+
+    def get_source_definition(self, request: api.GetSourceDefinitionRequest) -> api.GetSourceDefinitionResponse:
+        r"""Get source definition details."""
+        hook_ctx = HookContext(operation_id='getSourceDefinition', oauth2_scopes=[], security_source=self.sdk_configuration.security)
+        base_url = utils.template_url(*self.sdk_configuration.get_server_details())
+
+        url = utils.generate_url(base_url, '/workspaces/{workspaceId}/definitions/sources/{definitionId}', request)
+
+        if callable(self.sdk_configuration.security):
+            headers, query_params = utils.get_security(self.sdk_configuration.security())
+        else:
+            headers, query_params = utils.get_security(self.sdk_configuration.security)
+
+        headers['Accept'] = 'application/json'
+        headers['user-agent'] = self.sdk_configuration.user_agent
+        client = self.sdk_configuration.client
+
+        try:
+            req = client.prepare_request(requests_http.Request('GET', url, params=query_params, headers=headers))
+            req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req)
+            http_res = client.send(req)
+        except Exception as e:
+            _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e)
+            if e is not None:
+                raise e
+
+        if utils.match_status_codes(['403','404','4XX','5XX'], http_res.status_code):
+            result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None)
+            if e is not None:
+                raise e
+            if result is not None:
+                http_res = result
+        else:
+            http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res)
+
+
+
+        res = api.GetSourceDefinitionResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res)
+
+        if http_res.status_code == 200:
+            # pylint: disable=no-else-return
+            if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'):
+                out = utils.unmarshal_json(http_res.text, Optional[models.DefinitionResponse])
+                res.definition_response = out
+            else:
+                content_type = http_res.headers.get('Content-Type')
+                raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        else:
+            raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
+
+        return res
+
+
+
+    def list_source_definitions(self, request: api.ListSourceDefinitionsRequest) -> api.ListSourceDefinitionsResponse:
+        r"""List source definitions."""
+        hook_ctx = HookContext(operation_id='listSourceDefinitions', oauth2_scopes=[], security_source=self.sdk_configuration.security)
+        base_url = utils.template_url(*self.sdk_configuration.get_server_details())
+
+        url = utils.generate_url(base_url, '/workspaces/{workspaceId}/definitions/sources', request)
+
+        if callable(self.sdk_configuration.security):
+            headers, query_params = utils.get_security(self.sdk_configuration.security())
+        else:
+            headers, query_params = utils.get_security(self.sdk_configuration.security)
+
+        headers['Accept'] = 'application/json'
+        headers['user-agent'] = self.sdk_configuration.user_agent
+        client = self.sdk_configuration.client
+
+        try:
+            req = client.prepare_request(requests_http.Request('GET', url, params=query_params, headers=headers))
+            req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req)
+            http_res = client.send(req)
+        except Exception as e:
+            _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e)
+            if e is not None:
+                raise e
+
+        if utils.match_status_codes(['403','404','4XX','5XX'], http_res.status_code):
+            result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None)
+            if e is not None:
+                raise e
+            if result is not None:
+                http_res = result
+        else:
+            http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res)
+
+
+
+        res = api.ListSourceDefinitionsResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res)
+
+        if http_res.status_code == 200:
+            # pylint: disable=no-else-return
+            if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'):
+                out = utils.unmarshal_json(http_res.text, Optional[models.DefinitionsResponse])
+                res.definitions_response = out
+            else:
+                content_type = http_res.headers.get('Content-Type')
+                raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        else:
+            raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
+
+        return res
+
+
+
+    def update_source_definition(self, request: api.UpdateSourceDefinitionRequest) -> api.UpdateSourceDefinitionResponse:
+        r"""Update source definition details."""
+        hook_ctx = HookContext(operation_id='updateSourceDefinition', oauth2_scopes=[], security_source=self.sdk_configuration.security)
+        base_url = utils.template_url(*self.sdk_configuration.get_server_details())
+
+        url = utils.generate_url(base_url, '/workspaces/{workspaceId}/definitions/sources/{definitionId}', request)
+
+        if callable(self.sdk_configuration.security):
+            headers, query_params = utils.get_security(self.sdk_configuration.security())
+        else:
+            headers, query_params = utils.get_security(self.sdk_configuration.security)
+
+        req_content_type, data, form = utils.serialize_request_body(request, api.UpdateSourceDefinitionRequest, "update_definition_request", False, False, 'json')
+        if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'):
+            headers['content-type'] = req_content_type
+        if data is None and form is None:
+            raise Exception('request body is required')
+        headers['Accept'] = 'application/json'
+        headers['user-agent'] = self.sdk_configuration.user_agent
+        client = self.sdk_configuration.client
+
+        try:
+            req = client.prepare_request(requests_http.Request('PUT', url, params=query_params, data=data, files=form, headers=headers))
+            req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req)
+            http_res = client.send(req)
+        except Exception as e:
+            _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e)
+            if e is not None:
+                raise e
+
+        if utils.match_status_codes(['403','404','4XX','5XX'], http_res.status_code):
+            result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None)
+            if e is not None:
+                raise e
+            if result is not None:
+                http_res = result
+        else:
+            http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res)
+
+
+
+        res = api.UpdateSourceDefinitionResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res)
+
+        if http_res.status_code == 200:
+            # pylint: disable=no-else-return
+            if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'):
+                out = utils.unmarshal_json(http_res.text, Optional[models.DefinitionResponse])
+                res.definition_response = out
+            else:
+                content_type = http_res.headers.get('Content-Type')
+                raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        else:
+            raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
+
+        return res
+
+
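Usage note: the four source-definition methods above all follow the SDK's standard pattern (build the request object, call the method, read the typed field off the response). A minimal sketch, assuming a configured client `s` and assuming the group is exposed under a `source_definitions` accessor with `workspace_id`/`definition_id` request fields mirroring the `{workspaceId}`/`{definitionId}` path parameters; none of those names are confirmed by this diff:

    from airbyte_api import api

    # Hypothetical accessor and field names -- see the assumptions above.
    defs = s.source_definitions.list_source_definitions(
        api.ListSourceDefinitionsRequest(workspace_id='workspace-uuid'))
    if defs.definitions_response is not None:
        detail = s.source_definitions.get_source_definition(
            api.GetSourceDefinitionRequest(workspace_id='workspace-uuid',
                                           definition_id='definition-uuid'))
        print(detail.definition_response)

On a 200 these methods populate `definition_response`/`definitions_response`, exactly as in the branches above; every matched 4XX/5XX status raises `errors.SDKError` instead of returning.
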
diff --git a/src/airbyte_api/sources.py b/src/airbyte_api/sources.py
index a1c9f090..f864c4e9 100644
--- a/src/airbyte_api/sources.py
+++ b/src/airbyte_api/sources.py
@@ -65,7 +65,9 @@ def create_source(self, request: Optional[models.SourceCreateRequest] = None) ->
             else:
                 content_type = http_res.headers.get('Content-Type')
                 raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
-        elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
@@ -114,7 +116,9 @@ def delete_source(self, request: api.DeleteSourceRequest) -> api.DeleteSourceRes
 
         if http_res.status_code == 204:
             pass
-        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
@@ -135,6 +139,7 @@ def get_source(self, request: api.GetSourceRequest) -> api.GetSourceResponse:
         else:
             headers, query_params = utils.get_security(self.sdk_configuration.security)
 
+        query_params = { **utils.get_query_params(request), **query_params }
         headers['Accept'] = 'application/json'
         headers['user-agent'] = self.sdk_configuration.user_agent
         client = self.sdk_configuration.client
@@ -169,7 +174,9 @@ def get_source(self, request: api.GetSourceRequest) -> api.GetSourceResponse:
             else:
                 content_type = http_res.headers.get('Content-Type')
                 raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
-        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
@@ -229,7 +236,9 @@ def initiate_o_auth(self, request: models.InitiateOauthRequest) -> api.InitiateO
 
         if http_res.status_code == 200:
             pass
-        elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
@@ -285,7 +294,9 @@ def list_sources(self, request: api.ListSourcesRequest) -> api.ListSourcesRespon
             else:
                 content_type = http_res.headers.get('Content-Type')
                 raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
-        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
@@ -343,7 +354,9 @@ def patch_source(self, request: api.PatchSourceRequest) -> api.PatchSourceRespon
             else:
                 content_type = http_res.headers.get('Content-Type')
                 raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
-        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
@@ -401,7 +414,9 @@ def put_source(self, request: api.PutSourceRequest) -> api.PutSourceResponse:
             else:
                 content_type = http_res.headers.get('Content-Type')
                 raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
-        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
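The rewritten branches in these hunks split the old combined 4XX/5XX condition into two arms that raise the same `errors.SDKError`, so caller-visible behavior is unchanged; the ranges are just matched explicitly. One genuine behavioral addition sits in the `get_source` hunk: request query parameters are now merged in via `utils.get_query_params(request)`, so they are actually sent. Callers that want to retry only server errors can branch on the status code carried by the error. A sketch, assuming a configured client `s`, assuming `SDKError` exposes its constructor arguments (message, `status_code`, body, raw response) as attributes of the same names, and assuming a `source_id` request field:

    from airbyte_api import api, errors

    try:
        res = s.sources.get_source(api.GetSourceRequest(source_id='source-uuid'))
    except errors.SDKError as err:
        if 500 <= err.status_code < 600:
            pass  # server error: plausibly retryable with backoff
        else:
            raise  # client error (4XX): fix the request instead
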
diff --git a/src/airbyte_api/streams.py b/src/airbyte_api/streams.py
index 8707d994..420825b2 100644
--- a/src/airbyte_api/streams.py
+++ b/src/airbyte_api/streams.py
@@ -61,7 +61,9 @@ def get_stream_properties(self, request: api.GetStreamPropertiesRequest) -> api.
             else:
                 content_type = http_res.headers.get('Content-Type')
                 raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
-        elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
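The `get_stream_properties` operation touched above uses the same calling pattern. A minimal sketch, assuming a configured client `s`, a `streams` accessor, and illustrative request/response field names that this hunk does not show:

    from airbyte_api import api

    props = s.streams.get_stream_properties(api.GetStreamPropertiesRequest(
        source_id='source-uuid',              # assumed field
        destination_id='destination-uuid'))   # assumed field
    print(props.raw_response.status_code)     # raw_response is set on every SDK response object
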
diff --git a/src/airbyte_api/tags.py b/src/airbyte_api/tags.py
new file mode 100644
index 00000000..7a9eb1ff
--- /dev/null
+++ b/src/airbyte_api/tags.py
@@ -0,0 +1,316 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+import requests as requests_http
+from .sdkconfiguration import SDKConfiguration
+from airbyte_api import api, errors, models, utils
+from airbyte_api._hooks import AfterErrorContext, AfterSuccessContext, BeforeRequestContext, HookContext
+from typing import Optional
+
+class Tags:
+    sdk_configuration: SDKConfiguration
+
+    def __init__(self, sdk_config: SDKConfiguration) -> None:
+        self.sdk_configuration = sdk_config
+
+
+
+    def create_tag(self, request: models.TagCreateRequest) -> api.CreateTagResponse:
+        r"""Create a tag
+        Create a tag
+        """
+        hook_ctx = HookContext(operation_id='createTag', oauth2_scopes=[], security_source=self.sdk_configuration.security)
+        base_url = utils.template_url(*self.sdk_configuration.get_server_details())
+
+        url = base_url + '/tags'
+
+        if callable(self.sdk_configuration.security):
+            headers, query_params = utils.get_security(self.sdk_configuration.security())
+        else:
+            headers, query_params = utils.get_security(self.sdk_configuration.security)
+
+        req_content_type, data, form = utils.serialize_request_body(request, models.TagCreateRequest, "request", False, False, 'json')
+        if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'):
+            headers['content-type'] = req_content_type
+        if data is None and form is None:
+            raise Exception('request body is required')
+        headers['Accept'] = 'application/json'
+        headers['user-agent'] = self.sdk_configuration.user_agent
+        client = self.sdk_configuration.client
+
+        try:
+            req = client.prepare_request(requests_http.Request('POST', url, params=query_params, data=data, files=form, headers=headers))
+            req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req)
+            http_res = client.send(req)
+        except Exception as e:
+            _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e)
+            if e is not None:
+                raise e
+
+        if utils.match_status_codes(['400','403','409','4XX','5XX'], http_res.status_code):
+            result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None)
+            if e is not None:
+                raise e
+            if result is not None:
+                http_res = result
+        else:
+            http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res)
+
+
+
+        res = api.CreateTagResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res)
+
+        if http_res.status_code == 200:
+            # pylint: disable=no-else-return
+            if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'):
+                out = utils.unmarshal_json(http_res.text, Optional[models.TagResponse])
+                res.tag_response = out
+            else:
+                content_type = http_res.headers.get('Content-Type')
+                raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code == 409 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        else:
+            raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
+
+        return res
+
+
+
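`create_tag` above requires a `models.TagCreateRequest` body (the serialize call rejects an empty body) and returns the created tag in `tag_response` on a 200. A sketch, assuming a configured client `s`, a `tags` accessor, and illustrative field names this diff does not confirm:

    from airbyte_api import models

    created = s.tags.create_tag(models.TagCreateRequest(
        name='production',                # hypothetical field
        workspace_id='workspace-uuid'))   # hypothetical field
    if created.tag_response is not None:
        print(created.tag_response)

Note the 409 in the matched status codes: creating a duplicate tag surfaces as an `errors.SDKError` with `status_code == 409` rather than a typed conflict object.
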
+    def delete_tag(self, request: api.DeleteTagRequest) -> api.DeleteTagResponse:
+        r"""Delete a tag
+        Delete a tag
+        """
+        hook_ctx = HookContext(operation_id='deleteTag', oauth2_scopes=[], security_source=self.sdk_configuration.security)
+        base_url = utils.template_url(*self.sdk_configuration.get_server_details())
+
+        url = utils.generate_url(base_url, '/tags/{tagId}', request)
+
+        if callable(self.sdk_configuration.security):
+            headers, query_params = utils.get_security(self.sdk_configuration.security())
+        else:
+            headers, query_params = utils.get_security(self.sdk_configuration.security)
+
+        headers['Accept'] = '*/*'
+        headers['user-agent'] = self.sdk_configuration.user_agent
+        client = self.sdk_configuration.client
+
+        try:
+            req = client.prepare_request(requests_http.Request('DELETE', url, params=query_params, headers=headers))
+            req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req)
+            http_res = client.send(req)
+        except Exception as e:
+            _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e)
+            if e is not None:
+                raise e
+
+        if utils.match_status_codes(['403','404','4XX','5XX'], http_res.status_code):
+            result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None)
+            if e is not None:
+                raise e
+            if result is not None:
+                http_res = result
+        else:
+            http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res)
+
+
+
+        res = api.DeleteTagResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res)
+
+        if http_res.status_code == 204:
+            pass
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        else:
+            raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
+
+        return res
+
+
+
+    def get_tag(self, request: api.GetTagRequest) -> api.GetTagResponse:
+        r"""Get a tag
+        Get a tag
+        """
+        hook_ctx = HookContext(operation_id='getTag', oauth2_scopes=[], security_source=self.sdk_configuration.security)
+        base_url = utils.template_url(*self.sdk_configuration.get_server_details())
+
+        url = utils.generate_url(base_url, '/tags/{tagId}', request)
+
+        if callable(self.sdk_configuration.security):
+            headers, query_params = utils.get_security(self.sdk_configuration.security())
+        else:
+            headers, query_params = utils.get_security(self.sdk_configuration.security)
+
+        headers['Accept'] = 'application/json'
+        headers['user-agent'] = self.sdk_configuration.user_agent
+        client = self.sdk_configuration.client
+
+        try:
+            req = client.prepare_request(requests_http.Request('GET', url, params=query_params, headers=headers))
+            req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req)
+            http_res = client.send(req)
+        except Exception as e:
+            _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e)
+            if e is not None:
+                raise e
+
+        if utils.match_status_codes(['403','404','4XX','5XX'], http_res.status_code):
+            result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None)
+            if e is not None:
+                raise e
+            if result is not None:
+                http_res = result
+        else:
+            http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res)
+
+
+
+        res = api.GetTagResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res)
+
+        if http_res.status_code == 200:
+            # pylint: disable=no-else-return
+            if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'):
+                out = utils.unmarshal_json(http_res.text, Optional[models.TagResponse])
+                res.tag_response = out
+            else:
+                content_type = http_res.headers.get('Content-Type')
+                raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        else:
+            raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
+
+        return res
+
+
+
+    def list_tags(self, request: api.ListTagsRequest) -> api.ListTagsResponse:
+        r"""List all tags
+        Lists all tags
+        """
+        hook_ctx = HookContext(operation_id='listTags', oauth2_scopes=[], security_source=self.sdk_configuration.security)
+        base_url = utils.template_url(*self.sdk_configuration.get_server_details())
+
+        url = base_url + '/tags'
+
+        if callable(self.sdk_configuration.security):
+            headers, query_params = utils.get_security(self.sdk_configuration.security())
+        else:
+            headers, query_params = utils.get_security(self.sdk_configuration.security)
+
+        query_params = { **utils.get_query_params(request), **query_params }
+        headers['Accept'] = 'application/json'
+        headers['user-agent'] = self.sdk_configuration.user_agent
+        client = self.sdk_configuration.client
+
+        try:
+            req = client.prepare_request(requests_http.Request('GET', url, params=query_params, headers=headers))
+            req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req)
+            http_res = client.send(req)
+        except Exception as e:
+            _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e)
+            if e is not None:
+                raise e
+
+        if utils.match_status_codes(['403','404','4XX','5XX'], http_res.status_code):
+            result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None)
+            if e is not None:
+                raise e
+            if result is not None:
+                http_res = result
+        else:
+            http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res)
+
+
+
+        res = api.ListTagsResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res)
+
+        if http_res.status_code == 200:
+            # pylint: disable=no-else-return
+            if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'):
+                out = utils.unmarshal_json(http_res.text, Optional[models.TagsResponse])
+                res.tags_response = out
+            else:
+                content_type = http_res.headers.get('Content-Type')
+                raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        else:
+            raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
+
+        return res
+
+
+
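`list_tags` is the only tag operation that forwards request query parameters (via the `utils.get_query_params(request)` merge above), so any filters ride on `api.ListTagsRequest`. A sketch, assuming a configured client `s` and a `tags` accessor; the available filter fields (for example a workspace filter) are assumptions not shown in this file:

    from airbyte_api import api

    listed = s.tags.list_tags(api.ListTagsRequest())  # filter fields omitted; see assumption above
    if listed.tags_response is not None:
        print(listed.tags_response)
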
+    def update_tag(self, request: api.UpdateTagRequest) -> api.UpdateTagResponse:
+        r"""Update a tag
+        Update a tag
+        """
+        hook_ctx = HookContext(operation_id='updateTag', oauth2_scopes=[], security_source=self.sdk_configuration.security)
+        base_url = utils.template_url(*self.sdk_configuration.get_server_details())
+
+        url = utils.generate_url(base_url, '/tags/{tagId}', request)
+
+        if callable(self.sdk_configuration.security):
+            headers, query_params = utils.get_security(self.sdk_configuration.security())
+        else:
+            headers, query_params = utils.get_security(self.sdk_configuration.security)
+
+        req_content_type, data, form = utils.serialize_request_body(request, api.UpdateTagRequest, "tag_patch_request", False, False, 'json')
+        if req_content_type is not None and req_content_type not in ('multipart/form-data', 'multipart/mixed'):
+            headers['content-type'] = req_content_type
+        if data is None and form is None:
+            raise Exception('request body is required')
+        headers['Accept'] = 'application/json'
+        headers['user-agent'] = self.sdk_configuration.user_agent
+        client = self.sdk_configuration.client
+
+        try:
+            req = client.prepare_request(requests_http.Request('PATCH', url, params=query_params, data=data, files=form, headers=headers))
+            req = self.sdk_configuration.get_hooks().before_request(BeforeRequestContext(hook_ctx), req)
+            http_res = client.send(req)
+        except Exception as e:
+            _, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), None, e)
+            if e is not None:
+                raise e
+
+        if utils.match_status_codes(['400','403','404','4XX','5XX'], http_res.status_code):
+            result, e = self.sdk_configuration.get_hooks().after_error(AfterErrorContext(hook_ctx), http_res, None)
+            if e is not None:
+                raise e
+            if result is not None:
+                http_res = result
+        else:
+            http_res = self.sdk_configuration.get_hooks().after_success(AfterSuccessContext(hook_ctx), http_res)
+
+
+
+        res = api.UpdateTagResponse(status_code=http_res.status_code, content_type=http_res.headers.get('Content-Type') or '', raw_response=http_res)
+
+        if http_res.status_code == 200:
+            # pylint: disable=no-else-return
+            if utils.match_content_type(http_res.headers.get('Content-Type') or '', 'application/json'):
+                out = utils.unmarshal_json(http_res.text, Optional[models.TagResponse])
+                res.tag_response = out
+            else:
+                content_type = http_res.headers.get('Content-Type')
+                raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        else:
+            raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
+
+        return res
+
+
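`update_tag` serializes its body from a field literally named `tag_patch_request` (see the `serialize_request_body` call above) and templates `{tagId}` into the URL. A sketch, assuming a configured client `s`, a `tag_id` request field for the path parameter, and a `models.TagPatchRequest` body model whose name and fields are illustrative only:

    from airbyte_api import api, models

    updated = s.tags.update_tag(api.UpdateTagRequest(
        tag_id='tag-uuid',                        # assumed path-param field
        tag_patch_request=models.TagPatchRequest( # body field name taken from the code above;
            name='staging')))                     # model/field names hypothetical
    print(updated.tag_response)
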
diff --git a/src/airbyte_api/users.py b/src/airbyte_api/users.py
index 06116105..44b70954 100644
--- a/src/airbyte_api/users.py
+++ b/src/airbyte_api/users.py
@@ -63,7 +63,9 @@ def list_users_within_an_organization(self, request: api.ListUsersWithinAnOrgani
             else:
                 content_type = http_res.headers.get('Content-Type')
                 raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
-        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
diff --git a/src/airbyte_api/utils/utils.py b/src/airbyte_api/utils/utils.py
index 58a54070..3b2bb1e4 100644
--- a/src/airbyte_api/utils/utils.py
+++ b/src/airbyte_api/utils/utils.py
@@ -614,7 +614,7 @@ def serialize_multipart_form(
         file_fields = fields(val)
 
         file_name = ""
-        field_name = ""
+        field_name = field_metadata.get("field_name")
         content = bytes()
 
         for file_field in file_fields:
@@ -625,9 +625,8 @@ def serialize_multipart_form(
             if file_metadata.get("content") is True:
                 content = getattr(val, file_field.name)
             else:
-                field_name = file_metadata.get("field_name", file_field.name)
                 file_name = getattr(val, file_field.name)
-            if field_name == "" or file_name == "" or content == bytes():
+            if file_name == "" or content == bytes():
                 raise Exception("invalid multipart/form-data file")
 
         form.append([field_name, [file_name, content]])
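The `serialize_multipart_form` change above moves the multipart part name to the enclosing field's own metadata: `field_name` is now read once from `field_metadata` before iterating the file dataclass, instead of from a `field_name` key inside the file's members, and the validation accordingly stops requiring that inner key. In effect, a file wrapper only marks which member holds the file name and which holds the content. A hedged sketch of the dataclass shape this implies; the `multipart_form` metadata key and the class/field names are assumptions drawn from typical Speakeasy-generated models, not from this diff:

    from dataclasses import dataclass, field

    @dataclass
    class File:
        # The member without 'content' metadata is treated as the file name.
        file_name: str = field(metadata={'multipart_form': {'file_name': True}})
        content: bytes = field(metadata={'multipart_form': {'content': True}})

    @dataclass
    class UploadRequest:
        # 'field_name' is now resolved here, on the outer field's metadata.
        document: File = field(metadata={'multipart_form': {'file': True, 'field_name': 'document'}})
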
diff --git a/src/airbyte_api/workspaces.py b/src/airbyte_api/workspaces.py
index bd5d877c..4a2ebc8b 100644
--- a/src/airbyte_api/workspaces.py
+++ b/src/airbyte_api/workspaces.py
@@ -62,7 +62,9 @@ def create_or_update_workspace_o_auth_credentials(self, request: api.CreateOrUpd
 
         if http_res.status_code == 200:
             pass
-        elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
@@ -122,7 +124,9 @@ def create_workspace(self, request: models.WorkspaceCreateRequest) -> api.Create
             else:
                 content_type = http_res.headers.get('Content-Type')
                 raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
-        elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
@@ -171,7 +175,9 @@ def delete_workspace(self, request: api.DeleteWorkspaceRequest) -> api.DeleteWor
 
         if http_res.status_code == 204:
             pass
-        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
@@ -226,7 +232,9 @@ def get_workspace(self, request: api.GetWorkspaceRequest) -> api.GetWorkspaceRes
             else:
                 content_type = http_res.headers.get('Content-Type')
                 raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
-        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
@@ -282,7 +290,9 @@ def list_workspaces(self, request: api.ListWorkspacesRequest) -> api.ListWorkspa
             else:
                 content_type = http_res.headers.get('Content-Type')
                 raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
-        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 403 or http_res.status_code == 404 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)
@@ -342,7 +352,9 @@ def update_workspace(self, request: api.UpdateWorkspaceRequest) -> api.UpdateWor
             else:
                 content_type = http_res.headers.get('Content-Type')
                 raise errors.SDKError(f'unknown content-type received: {content_type}', http_res.status_code, http_res.text, http_res)
-        elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code >= 400 and http_res.status_code < 500 or http_res.status_code >= 500 and http_res.status_code < 600:
+        elif http_res.status_code == 400 or http_res.status_code == 403 or http_res.status_code >= 400 and http_res.status_code < 500:
+            raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
+        elif http_res.status_code >= 500 and http_res.status_code < 600:
             raise errors.SDKError('API error occurred', http_res.status_code, http_res.text, http_res)
         else:
             raise errors.SDKError('unknown status code received', http_res.status_code, http_res.text, http_res)