diff --git a/.speakeasy/gen.lock b/.speakeasy/gen.lock index 08c5ddf6..da48e959 100755 --- a/.speakeasy/gen.lock +++ b/.speakeasy/gen.lock @@ -1,12 +1,12 @@ lockVersion: 2.0.0 id: 07961597-3730-4940-9fd0-35eb4118eab3 management: - docChecksum: 9e5bfa57848f3a351e7908c770ab99b5 + docChecksum: ed32ee7b8636bcf3299067ede0939c1f docVersion: 1.0.0 speakeasyVersion: 1.508.0 generationVersion: 2.536.0 - releaseVersion: 0.53.0 - configChecksum: d4b8483119f7c6e028c35824472c58e6 + releaseVersion: 0.53.1 + configChecksum: a714172a7fed2d6454d58f1e37422309 repoURL: https://github.com/airbytehq/airbyte-api-python-sdk.git repoSubDirectory: . installationURL: https://github.com/airbytehq/airbyte-api-python-sdk.git @@ -144,7 +144,9 @@ generatedFiles: - docs/models/actionreporttime.md - docs/models/activecampaign.md - docs/models/actortypeenum.md + - docs/models/acuityscheduling.md - docs/models/adanalyticsreportconfiguration.md + - docs/models/adobecommercemagento.md - docs/models/agilecrm.md - docs/models/aha.md - docs/models/airbyte.md @@ -154,6 +156,7 @@ generatedFiles: - docs/models/akeneo.md - docs/models/algolia.md - docs/models/allow.md + - docs/models/alltypes.md - docs/models/alpacabrokerapi.md - docs/models/alphavantage.md - docs/models/amazonads.md @@ -243,6 +246,7 @@ generatedFiles: - docs/models/basic.md - docs/models/batchedstandardinserts.md - docs/models/beamer.md + - docs/models/bearertokenfromoauth2.md - docs/models/betweenfilter.md - docs/models/bigmailer.md - docs/models/bigquery.md @@ -360,6 +364,7 @@ generatedFiles: - docs/models/customerio.md - docs/models/customerly.md - docs/models/customerstatus.md + - docs/models/customplan.md - docs/models/customqueriesarray.md - docs/models/customreportconfig.md - docs/models/databricks.md @@ -368,6 +373,8 @@ generatedFiles: - docs/models/datacenterlocation.md - docs/models/datadog.md - docs/models/datafreshness.md + - docs/models/datagen.md + - docs/models/datagenerationtype.md - docs/models/dataregion.md - 
docs/models/datascope.md - docs/models/datasetlocation.md @@ -375,11 +382,13 @@ generatedFiles: - docs/models/datatype.md - docs/models/daterange.md - docs/models/days.md + - docs/models/db2enterprise.md - docs/models/dbt.md - docs/models/declarativesourcedefinitionresponse.md - docs/models/declarativesourcedefinitionsresponse.md - docs/models/deepset.md - docs/models/defaultvectorizer.md + - docs/models/defillama.md - docs/models/definitionofconversioncountinreports.md - docs/models/definitionresponse.md - docs/models/definitionsresponse.md @@ -485,7 +494,6 @@ generatedFiles: - docs/models/destinationhubspotcredentials.md - docs/models/destinationhubspothubspot.md - docs/models/destinationhubspotnone.md - - docs/models/destinationhubspotobjectstorageconfiguration.md - docs/models/destinationhubspots3.md - docs/models/destinationhubspots3bucketregion.md - docs/models/destinationhubspotschemasstoragetype.md @@ -705,6 +713,7 @@ generatedFiles: - docs/models/destinations3datalakes3bucketregion.md - docs/models/destinations3datalakeschemascatalogtype.md - docs/models/destinations3datalakeschemascatalogtypecatalogtype.md + - docs/models/destinations3datalakeschemascatalogtypecatalogtypecatalogtype.md - docs/models/destinations3deflate.md - docs/models/destinations3flattening.md - docs/models/destinations3formattype.md @@ -736,7 +745,7 @@ generatedFiles: - docs/models/destinations3zstandard.md - docs/models/destinationsalesforce.md - docs/models/destinationsalesforcenone.md - - docs/models/destinationsalesforceobjectstorageconfiguration.md + - docs/models/destinationsalesforceobjectstoragespec.md - docs/models/destinationsalesforces3.md - docs/models/destinationsalesforces3bucketregion.md - docs/models/destinationsalesforcesalesforce.md @@ -745,6 +754,7 @@ generatedFiles: - docs/models/destinationsftpjson.md - docs/models/destinationsnowflake.md - docs/models/destinationsnowflakeauthtype.md + - docs/models/destinationsnowflakecdcdeletionmode.md - 
docs/models/destinationsnowflakecortex.md - docs/models/destinationsnowflakecortexazureopenai.md - docs/models/destinationsnowflakecortexbymarkdownheader.md @@ -768,9 +778,7 @@ generatedFiles: - docs/models/destinationsnowflakecortexschemasprocessingtextsplittermode.md - docs/models/destinationsnowflakecortexschemasprocessingtextsplittertextsplittermode.md - docs/models/destinationsnowflakecortextextsplitter.md - - docs/models/destinationsnowflakeoauth20.md - docs/models/destinationsnowflakeschemasauthtype.md - - docs/models/destinationsnowflakeschemascredentialsauthtype.md - docs/models/destinationsresponse.md - docs/models/destinationsurrealdb.md - docs/models/destinationteradata.md @@ -892,6 +900,7 @@ generatedFiles: - docs/models/encryptionrsa.md - docs/models/engagementwindowdays.md - docs/models/enterprise.md + - docs/models/enterpriseplan.md - docs/models/entity.md - docs/models/environment.md - docs/models/equal.md @@ -916,6 +925,7 @@ generatedFiles: - docs/models/fastbill.md - docs/models/fastly.md - docs/models/fauna.md + - docs/models/fieldfiltering.md - docs/models/fieldnamemappingconfigmodel.md - docs/models/fieldrenaming.md - docs/models/fields.md @@ -948,6 +958,7 @@ generatedFiles: - docs/models/formattypewildcard.md - docs/models/formbricks.md - docs/models/freeagentconnector.md + - docs/models/freeplan.md - docs/models/freightview.md - docs/models/freshbooks.md - docs/models/freshcaller.md @@ -1011,6 +1022,7 @@ generatedFiles: - docs/models/greythr.md - docs/models/gridly.md - docs/models/groupby.md + - docs/models/growthplan.md - docs/models/guru.md - docs/models/gutendex.md - docs/models/gzip.md @@ -1041,6 +1053,7 @@ generatedFiles: - docs/models/imagga.md - docs/models/in_.md - docs/models/incidentio.md + - docs/models/incremental.md - docs/models/indexing.md - docs/models/inflowinventory.md - docs/models/initiateoauthrequest.md @@ -1126,6 +1139,7 @@ generatedFiles: - docs/models/mailjetsms.md - docs/models/mailosaur.md - docs/models/mailtrap.md 
+ - docs/models/mantle.md - docs/models/mapperconfiguration.md - docs/models/marketnewscategory.md - docs/models/marketo.md @@ -1136,6 +1150,7 @@ generatedFiles: - docs/models/merge.md - docs/models/metabase.md - docs/models/method.md + - docs/models/metricool.md - docs/models/metricsfilter.md - docs/models/microsoftdataverse.md - docs/models/microsoftentraid.md @@ -1204,6 +1219,7 @@ generatedFiles: - docs/models/nylas.md - docs/models/nytimes.md - docs/models/oauth.md + - docs/models/oauth2.md - docs/models/oauth20.md - docs/models/oauth20credentials.md - docs/models/oauth20withprivatekey.md @@ -1214,6 +1230,7 @@ generatedFiles: - docs/models/oauthactornames.md - docs/models/oauthauthentication.md - docs/models/objectstorageconfiguration.md + - docs/models/objectstoragespec.md - docs/models/okta.md - docs/models/omnisend.md - docs/models/oncehub.md @@ -1244,6 +1261,7 @@ generatedFiles: - docs/models/origindatacenterofthesurveymonkeyaccount.md - docs/models/oura.md - docs/models/outbrainamplify.md + - docs/models/outlook.md - docs/models/outputformat.md - docs/models/outputformatwildcard.md - docs/models/outputsize.md @@ -1295,12 +1313,14 @@ generatedFiles: - docs/models/piwik.md - docs/models/plaid.md - docs/models/plaidenvironment.md + - docs/models/plan.md - docs/models/planhat.md - docs/models/plausible.md - docs/models/plugin.md - docs/models/pocket.md - docs/models/pokeapi.md - docs/models/pokemonname.md + - docs/models/polariscatalog.md - docs/models/polygonstockapi.md - docs/models/poplar.md - docs/models/postgres.md @@ -1321,6 +1341,7 @@ generatedFiles: - docs/models/productcatalog.md - docs/models/productive.md - docs/models/projectsecret.md + - docs/models/proplan.md - docs/models/protocol.md - docs/models/publicpermissiontype.md - docs/models/pubsub.md @@ -1329,10 +1350,10 @@ generatedFiles: - docs/models/qualaroo.md - docs/models/queries.md - docs/models/quickbooks.md - - docs/models/raas.md - docs/models/railz.md - docs/models/randomsampling.md - 
docs/models/range.md + - docs/models/ratelimitplan.md - docs/models/rdstationmarketing.md - docs/models/rdstationmarketingauthorization.md - docs/models/readchangesusingchangedatacapturecdc.md @@ -1353,8 +1374,8 @@ generatedFiles: - docs/models/replicatepermissionsacl.md - docs/models/replicaterecords.md - docs/models/replyio.md - - docs/models/reportbasedstreams.md - docs/models/reportconfig.md + - docs/models/reportids.md - docs/models/reportingdataobject.md - docs/models/reportname.md - docs/models/reportoptions.md @@ -1362,8 +1383,6 @@ generatedFiles: - docs/models/required.md - docs/models/resolution.md - docs/models/resourcerequirements.md - - docs/models/rest.md - - docs/models/restapistreams.md - docs/models/restcatalog.md - docs/models/retailexpressbymaropost.md - docs/models/retently.md @@ -1474,6 +1493,8 @@ generatedFiles: - docs/models/source100ms.md - docs/models/source7shifts.md - docs/models/sourceactivecampaign.md + - docs/models/sourceacuityscheduling.md + - docs/models/sourceadobecommercemagento.md - docs/models/sourceagilecrm.md - docs/models/sourceaha.md - docs/models/sourceairbyte.md @@ -1532,6 +1553,7 @@ generatedFiles: - docs/models/sourceazureblobstorageschemasheaderdefinitiontype.md - docs/models/sourceazureblobstorageschemasstreamsfiletype.md - docs/models/sourceazureblobstorageschemasstreamsformatfiletype.md + - docs/models/sourceazureblobstorageschemasstreamsformatformatfiletype.md - docs/models/sourceazuretable.md - docs/models/sourcebabelforce.md - docs/models/sourcebabelforceregion.md @@ -1625,8 +1647,26 @@ generatedFiles: - docs/models/sourcecustomeriocustomerio.md - docs/models/sourcecustomerly.md - docs/models/sourcedatadog.md + - docs/models/sourcedatagen.md + - docs/models/sourcedatagendatatype.md + - docs/models/sourcedatagenschemasdatatype.md - docs/models/sourcedatascope.md + - docs/models/sourcedb2enterprise.md + - docs/models/sourcedb2enterprisecursormethod.md + - docs/models/sourcedb2enterpriseencryption.md + - 
docs/models/sourcedb2enterpriseencryptionmethod.md + - docs/models/sourcedb2enterprisenotunnel.md + - docs/models/sourcedb2enterprisepasswordauthentication.md + - docs/models/sourcedb2enterpriseschemasencryptionmethod.md + - docs/models/sourcedb2enterpriseschemastunnelmethod.md + - docs/models/sourcedb2enterpriseschemastunnelmethodtunnelmethod.md + - docs/models/sourcedb2enterprisesshkeyauthentication.md + - docs/models/sourcedb2enterprisesshtunnelmethod.md + - docs/models/sourcedb2enterprisetlsencryptedverifycertificate.md + - docs/models/sourcedb2enterprisetunnelmethod.md + - docs/models/sourcedb2enterpriseunencrypted.md - docs/models/sourcedbt.md + - docs/models/sourcedefillama.md - docs/models/sourcedelighted.md - docs/models/sourcedeputy.md - docs/models/sourcedingconnect.md @@ -1683,6 +1723,7 @@ generatedFiles: - docs/models/sourcefacebookmarketingfacebookmarketing.md - docs/models/sourcefacebookmarketingschemasauthtype.md - docs/models/sourcefacebookmarketingserviceaccountkeyauthentication.md + - docs/models/sourcefacebookmarketingvalidactionbreakdowns.md - docs/models/sourcefacebookmarketingvalidenums.md - docs/models/sourcefacebookpages.md - docs/models/sourcefactorial.md @@ -1720,6 +1761,10 @@ generatedFiles: - docs/models/sourcefreshcaller.md - docs/models/sourcefreshchat.md - docs/models/sourcefreshdesk.md + - docs/models/sourcefreshdeskplan.md + - docs/models/sourcefreshdeskschemasplan.md + - docs/models/sourcefreshdeskschemasratelimitplanplan.md + - docs/models/sourcefreshdeskschemasratelimitplanratelimitplanplan.md - docs/models/sourcefreshsales.md - docs/models/sourcefreshservice.md - docs/models/sourcefront.md @@ -1734,6 +1779,7 @@ generatedFiles: - docs/models/sourcegcsavroformat.md - docs/models/sourcegcscsvformat.md - docs/models/sourcegcscsvheaderdefinition.md + - docs/models/sourcegcsexcelformat.md - docs/models/sourcegcsfilebasedstreamconfig.md - docs/models/sourcegcsfiletype.md - docs/models/sourcegcsformat.md @@ -2181,6 +2227,7 @@ 
generatedFiles: - docs/models/sourcemailjetsms.md - docs/models/sourcemailosaur.md - docs/models/sourcemailtrap.md + - docs/models/sourcemantle.md - docs/models/sourcemarketo.md - docs/models/sourcemarketstack.md - docs/models/sourcemendeley.md @@ -2188,6 +2235,7 @@ generatedFiles: - docs/models/sourcemercadoads.md - docs/models/sourcemerge.md - docs/models/sourcemetabase.md + - docs/models/sourcemetricool.md - docs/models/sourcemicrosoftdataverse.md - docs/models/sourcemicrosoftentraid.md - docs/models/sourcemicrosoftlists.md @@ -2290,6 +2338,8 @@ generatedFiles: - docs/models/sourcemssqlmssql.md - docs/models/sourcemssqlnotunnel.md - docs/models/sourcemssqlpasswordauthentication.md + - docs/models/sourcemssqlreadchangesusingchangedatacapturecdc.md + - docs/models/sourcemssqlscanchangeswithuserdefinedcursor.md - docs/models/sourcemssqlschemasmethod.md - docs/models/sourcemssqlschemassslmethod.md - docs/models/sourcemssqlschemassslmethodsslmethod.md @@ -2301,6 +2351,7 @@ generatedFiles: - docs/models/sourcemssqlsslmethod.md - docs/models/sourcemssqltunnelmethod.md - docs/models/sourcemssqlunencrypted.md + - docs/models/sourcemssqlupdatemethod.md - docs/models/sourcemux.md - docs/models/sourcemyhours.md - docs/models/sourcemysql.md @@ -2331,6 +2382,7 @@ generatedFiles: - docs/models/sourcenetsuite.md - docs/models/sourcenetsuiteenterprise.md - docs/models/sourcenetsuiteenterpriseauthenticationmethod.md + - docs/models/sourcenetsuiteenterprisecursormethod.md - docs/models/sourcenetsuiteenterprisenotunnel.md - docs/models/sourcenetsuiteenterprisepasswordauthentication.md - docs/models/sourcenetsuiteenterprisescanchangeswithuserdefinedcursor.md @@ -2435,6 +2487,7 @@ generatedFiles: - docs/models/sourceoutbrainamplifyaccesstoken.md - docs/models/sourceoutbrainamplifyauthenticationmethod.md - docs/models/sourceoutbrainamplifyusernamepassword.md + - docs/models/sourceoutlook.md - docs/models/sourceoutreach.md - docs/models/sourceoveit.md - 
docs/models/sourcepabblysubscriptionsbilling.md @@ -2615,6 +2668,7 @@ generatedFiles: - docs/models/sourcesaphanaenterpriseschemastunnelmethodtunnelmethod.md - docs/models/sourcesaphanaenterprisesshkeyauthentication.md - docs/models/sourcesaphanaenterprisesshtunnelmethod.md + - docs/models/sourcesaphanaenterprisetablefilter.md - docs/models/sourcesaphanaenterprisetlsencryptedverifycertificate.md - docs/models/sourcesaphanaenterprisetunnelmethod.md - docs/models/sourcesaphanaenterpriseunencrypted.md @@ -2801,6 +2855,11 @@ generatedFiles: - docs/models/sourcethrivelearning.md - docs/models/sourceticketmaster.md - docs/models/sourcetickettailor.md + - docs/models/sourceticktick.md + - docs/models/sourceticktickauthenticationtype.md + - docs/models/sourceticktickauthtype.md + - docs/models/sourceticktickschemasauthtype.md + - docs/models/sourceticktickticktick.md - docs/models/sourcetiktokmarketing.md - docs/models/sourcetiktokmarketingauthenticationmethod.md - docs/models/sourcetiktokmarketingauthtype.md @@ -2863,6 +2922,8 @@ generatedFiles: - docs/models/sourceworkable.md - docs/models/sourceworkday.md - docs/models/sourceworkdayauthentication.md + - docs/models/sourceworkdayrest.md + - docs/models/sourceworkdayrestauthentication.md - docs/models/sourceworkflowmax.md - docs/models/sourceworkramp.md - docs/models/sourcewrike.md @@ -2976,6 +3037,7 @@ generatedFiles: - docs/models/swipeupattributionwindow.md - docs/models/systeme.md - docs/models/systemidsid.md + - docs/models/tablefilter.md - docs/models/taboola.md - docs/models/tag.md - docs/models/tagcreaterequest.md @@ -3002,6 +3064,8 @@ generatedFiles: - docs/models/throttled.md - docs/models/ticketmaster.md - docs/models/tickettailor.md + - docs/models/ticktick.md + - docs/models/ticktickauthorization.md - docs/models/tiktokmarketing.md - docs/models/tiktokmarketingcredentials.md - docs/models/timeaggregates.md @@ -3094,6 +3158,7 @@ generatedFiles: - docs/models/wordpress.md - docs/models/workable.md - 
docs/models/workday.md + - docs/models/workdayrest.md - docs/models/workflowmax.md - docs/models/workramp.md - docs/models/workspacecreaterequest.md @@ -3375,6 +3440,8 @@ generatedFiles: - src/airbyte_api/models/source_100ms.py - src/airbyte_api/models/source_7shifts.py - src/airbyte_api/models/source_activecampaign.py + - src/airbyte_api/models/source_acuity_scheduling.py + - src/airbyte_api/models/source_adobe_commerce_magento.py - src/airbyte_api/models/source_agilecrm.py - src/airbyte_api/models/source_aha.py - src/airbyte_api/models/source_airbyte.py @@ -3473,8 +3540,11 @@ generatedFiles: - src/airbyte_api/models/source_customer_io.py - src/airbyte_api/models/source_customerly.py - src/airbyte_api/models/source_datadog.py + - src/airbyte_api/models/source_datagen.py - src/airbyte_api/models/source_datascope.py + - src/airbyte_api/models/source_db2_enterprise.py - src/airbyte_api/models/source_dbt.py + - src/airbyte_api/models/source_defillama.py - src/airbyte_api/models/source_delighted.py - src/airbyte_api/models/source_deputy.py - src/airbyte_api/models/source_ding_connect.py @@ -3635,6 +3705,7 @@ generatedFiles: - src/airbyte_api/models/source_mailjet_sms.py - src/airbyte_api/models/source_mailosaur.py - src/airbyte_api/models/source_mailtrap.py + - src/airbyte_api/models/source_mantle.py - src/airbyte_api/models/source_marketo.py - src/airbyte_api/models/source_marketstack.py - src/airbyte_api/models/source_mendeley.py @@ -3642,6 +3713,7 @@ generatedFiles: - src/airbyte_api/models/source_mercado_ads.py - src/airbyte_api/models/source_merge.py - src/airbyte_api/models/source_metabase.py + - src/airbyte_api/models/source_metricool.py - src/airbyte_api/models/source_microsoft_dataverse.py - src/airbyte_api/models/source_microsoft_entra_id.py - src/airbyte_api/models/source_microsoft_lists.py @@ -3695,6 +3767,7 @@ generatedFiles: - src/airbyte_api/models/source_orb.py - src/airbyte_api/models/source_oura.py - src/airbyte_api/models/source_outbrain_amplify.py + 
- src/airbyte_api/models/source_outlook.py - src/airbyte_api/models/source_outreach.py - src/airbyte_api/models/source_oveit.py - src/airbyte_api/models/source_pabbly_subscriptions_billing.py @@ -3841,6 +3914,7 @@ generatedFiles: - src/airbyte_api/models/source_thrive_learning.py - src/airbyte_api/models/source_ticketmaster.py - src/airbyte_api/models/source_tickettailor.py + - src/airbyte_api/models/source_ticktick.py - src/airbyte_api/models/source_tiktok_marketing.py - src/airbyte_api/models/source_timely.py - src/airbyte_api/models/source_tinyemail.py @@ -3883,6 +3957,7 @@ generatedFiles: - src/airbyte_api/models/source_wordpress.py - src/airbyte_api/models/source_workable.py - src/airbyte_api/models/source_workday.py + - src/airbyte_api/models/source_workday_rest.py - src/airbyte_api/models/source_workflowmax.py - src/airbyte_api/models/source_workramp.py - src/airbyte_api/models/source_wrike.py @@ -3934,6 +4009,7 @@ generatedFiles: - src/airbyte_api/models/tagpatchrequest.py - src/airbyte_api/models/tagresponse.py - src/airbyte_api/models/tagsresponse.py + - src/airbyte_api/models/ticktick.py - src/airbyte_api/models/tiktok_marketing.py - src/airbyte_api/models/typeform.py - src/airbyte_api/models/updatedeclarativesourcedefinitionrequest.py @@ -3998,7 +4074,7 @@ examples: offset: 0 responses: "200": - application/json: {"data": [{"configurations": {}, "connectionId": "", "createdAt": 726733, "destinationId": "", "name": "test-connection", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "basic"}, "sourceId": "", "status": "deprecated", "tags": [{"color": "grey", "name": "", "tagId": "a57cb08e-c762-471d-8a7a-04783f69e675", "workspaceId": "3c652149-922c-4e01-aac2-001d6f740af7"}, {"color": "white", "name": "", "tagId": "67a4cae4-cdf0-465b-a6aa-51c0435bd42a", "workspaceId": "ced986f7-e9d0-4438-b0f0-4622226f5515"}, {"color": "white", "name": "", "tagId": "6e726826-84d9-475b-9843-2b202970fa01", 
"workspaceId": "2f301cba-1d84-41d1-9873-2c8d3acb53bb"}], "workspaceId": ""}, {"configurations": {}, "connectionId": "", "createdAt": 808076, "destinationId": "", "name": "", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "basic"}, "sourceId": "", "status": "inactive", "tags": [], "workspaceId": ""}, {"configurations": {}, "connectionId": "", "createdAt": 884379, "destinationId": "", "name": "", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "manual"}, "sourceId": "49237019-645d-47d4-b45b-5eddf97775ce", "status": "active", "tags": [{"color": "orange", "name": "", "tagId": "164a228e-17f2-4cbb-832a-554182adc8da", "workspaceId": "6207024d-c903-4e50-8969-f94237c9d7c9"}, {"color": "gold", "name": "", "tagId": "b0cc902b-acff-4def-85ce-25c37e2027ce", "workspaceId": "52bb088a-99ac-49b4-93c4-fdb8fe8d3612"}, {"color": "maroon", "name": "", "tagId": "af798c5c-737f-47ad-95ce-eb405bab6ad1", "workspaceId": "ec04abc4-49b4-4a61-a77f-6c0dd2c2ed68"}], "workspaceId": ""}, {"configurations": {}, "connectionId": "", "createdAt": 584545, "destinationId": "al312fs-0ab1-4f72-9ed7-0b8fc27c5826", "name": "", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "cron"}, "sourceId": "", "status": "inactive", "tags": [{"color": "teal", "name": "", "tagId": "e17d727d-7971-4e63-bc8a-8443f551b94a", "workspaceId": "40a42bea-9fa7-49d3-858c-d28e26b5d262"}, {"color": "tan", "name": "", "tagId": "7f8a0fc7-e3ff-45a9-bc05-4fb599f3003f", "workspaceId": "2a86dca0-6682-477b-b194-3225cfb3db50"}, {"color": "red", "name": "", "tagId": "15955c87-4dcd-4f1e-8d4c-c9a2ad68d233", "workspaceId": "4564af4a-f7b0-407b-8201-5ce5c0aa5c24"}], "workspaceId": ""}, {"configurations": {}, "connectionId": "", "createdAt": 821882, "destinationId": "", "name": "", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": 
"ignore", "schedule": {"scheduleType": "manual"}, "sourceId": "", "status": "active", "tags": [], "workspaceId": ""}, {"configurations": {}, "connectionId": "", "createdAt": 105968, "destinationId": "", "name": "", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "basic"}, "sourceId": "", "status": "active", "tags": [], "workspaceId": ""}], "next": "https://api.airbyte.com/v1/connections?limit=5&offset=10", "previous": "https://api.airbyte.com/v1/connections?limit=5&offset=0"} + application/json: {"data": [{"configurations": {}, "connectionId": "", "createdAt": 726733, "destinationId": "", "name": "test-connection", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "basic"}, "sourceId": "", "status": "locked", "tags": [{"color": "grey", "name": "", "tagId": "a57cb08e-c762-471d-8a7a-04783f69e675", "workspaceId": "3c652149-922c-4e01-aac2-001d6f740af7"}, {"color": "white", "name": "", "tagId": "67a4cae4-cdf0-465b-a6aa-51c0435bd42a", "workspaceId": "ced986f7-e9d0-4438-b0f0-4622226f5515"}, {"color": "white", "name": "", "tagId": "6e726826-84d9-475b-9843-2b202970fa01", "workspaceId": "2f301cba-1d84-41d1-9873-2c8d3acb53bb"}], "workspaceId": ""}, {"configurations": {}, "connectionId": "", "createdAt": 808076, "destinationId": "", "name": "", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "basic"}, "sourceId": "", "status": "inactive", "tags": [], "workspaceId": ""}, {"configurations": {}, "connectionId": "", "createdAt": 884379, "destinationId": "", "name": "", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "manual"}, "sourceId": "49237019-645d-47d4-b45b-5eddf97775ce", "status": "active", "tags": [{"color": "orange", "name": "", "tagId": "164a228e-17f2-4cbb-832a-554182adc8da", "workspaceId": 
"6207024d-c903-4e50-8969-f94237c9d7c9"}, {"color": "gold", "name": "", "tagId": "b0cc902b-acff-4def-85ce-25c37e2027ce", "workspaceId": "52bb088a-99ac-49b4-93c4-fdb8fe8d3612"}, {"color": "maroon", "name": "", "tagId": "af798c5c-737f-47ad-95ce-eb405bab6ad1", "workspaceId": "ec04abc4-49b4-4a61-a77f-6c0dd2c2ed68"}], "workspaceId": ""}, {"configurations": {}, "connectionId": "", "createdAt": 584545, "destinationId": "al312fs-0ab1-4f72-9ed7-0b8fc27c5826", "name": "", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "cron"}, "sourceId": "", "status": "deprecated", "tags": [{"color": "teal", "name": "", "tagId": "e17d727d-7971-4e63-bc8a-8443f551b94a", "workspaceId": "40a42bea-9fa7-49d3-858c-d28e26b5d262"}, {"color": "tan", "name": "", "tagId": "7f8a0fc7-e3ff-45a9-bc05-4fb599f3003f", "workspaceId": "2a86dca0-6682-477b-b194-3225cfb3db50"}, {"color": "red", "name": "", "tagId": "15955c87-4dcd-4f1e-8d4c-c9a2ad68d233", "workspaceId": "4564af4a-f7b0-407b-8201-5ce5c0aa5c24"}], "workspaceId": ""}, {"configurations": {}, "connectionId": "", "createdAt": 821882, "destinationId": "", "name": "", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "manual"}, "sourceId": "", "status": "active", "tags": [], "workspaceId": ""}, {"configurations": {}, "connectionId": "", "createdAt": 105968, "destinationId": "", "name": "", "namespaceDefinition": "destination", "nonBreakingSchemaUpdatesBehavior": "ignore", "schedule": {"scheduleType": "basic"}, "sourceId": "", "status": "active", "tags": [], "workspaceId": ""}], "next": "https://api.airbyte.com/v1/connections?limit=5&offset=10", "previous": "https://api.airbyte.com/v1/connections?limit=5&offset=0"} patchConnection: Connection Update Request Example: parameters: @@ -4027,10 +4103,10 @@ examples: application/json: {"configuration": {"host": "grizzled-planula.com", "port": 1521, "schema": "airbyte", "sid": "", 
"username": "Lempi78"}, "createdAt": 971525, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "postgres", "name": "Analytics Team Postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} Destination Creation Response Example: requestBody: - application/json: {"configuration": {"credentials": {"client_id": "", "client_secret": "", "refresh_token": "", "type": "OAuth"}}, "name": "", "workspaceId": "b3c34f9e-3902-48cb-a03a-59b4797bfc7d"} + application/json: {"configuration": {"embedding": {"openai_key": ""}, "indexing": {"additional_headers": [{"header_key": "X-OpenAI-Api-Key", "value": "my-openai-api-key"}], "auth": {}, "batch_size": 128, "default_vectorizer": "none", "host": "https://my-cluster.weaviate.network", "tenant_id": "", "text_field": "text"}, "omit_raw_text": false, "processing": {"chunk_overlap": 0, "chunk_size": 240595, "metadata_fields": ["user.name"], "text_fields": ["text"]}}, "name": "", "workspaceId": "b3c34f9e-3902-48cb-a03a-59b4797bfc7d"} responses: "200": - application/json: {"configuration": {"cache_type": "hash", "host": "localhost,127.0.0.1", "port": 6379, "ssl": false, "username": "Gunner_Considine73"}, "createdAt": 616066, "definitionId": "", "destinationId": "af0c3c67-aa61-419f-8922-95b0bf840e86", "destinationType": "", "name": "", "workspaceId": ""} + application/json: {"configuration": {"destination_path": "motherduck:"}, "createdAt": 616066, "definitionId": "", "destinationId": "af0c3c67-aa61-419f-8922-95b0bf840e86", "destinationType": "", "name": "", "workspaceId": ""} deleteDestination: speakeasy-default-delete-destination: parameters: @@ -4070,10 +4146,10 @@ examples: path: destinationId: "" requestBody: - application/json: {"configuration": {"api_key": "", "host": "well-made-litter.org"}} + application/json: {"configuration": {"database": "", "disable_type_dedupe": false, "drop_cascade": false, "host": "unwelcome-valley.name", "password": 
"a3y7m_FvITj4z5C", "port": 5439, "schema": "public", "username": "Katlyn6"}} responses: "200": - application/json: {"configuration": {"access_key_id": "A012345678910EXAMPLE", "dynamodb_endpoint": "", "dynamodb_region": "", "dynamodb_table_name_prefix": "airbyte_sync", "secret_access_key": "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"}, "createdAt": 679016, "definitionId": "", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "", "name": "running", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} + application/json: {"configuration": {"database": "", "disable_type_dedupe": false, "host": "frozen-airman.com", "port": 3306, "ssl": true, "username": "Jerrell_Oberbrunner"}, "createdAt": 679016, "definitionId": "", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "", "name": "running", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} putDestination: Destination Update Request Example: parameters: @@ -4083,16 +4159,16 @@ examples: application/json: {"configuration": {"access_key": "", "deployment_url": "https://cluttered-owl-337.convex.cloud"}, "name": "My Destination"} responses: "200": - application/json: {"configuration": {"catalog_type": {"catalog_type": "REST", "namespace": "", "server_uri": "https://second-sustenance.org"}, "main_branch_name": "main", "s3_bucket_name": "", "s3_bucket_region": "us-east-1", "warehouse_location": "s3://your-bucket/path/to/store/files/in"}, "createdAt": 488187, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "postgres", "name": "Analytics Team Postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} + application/json: {"configuration": {"catalog_type": {"catalog_name": "", "catalog_type": "POLARIS", "client_id": "abc123clientid", "client_secret": "secretkey123", "namespace": "", "server_uri": "https://second-sustenance.org"}, "main_branch_name": "main", "s3_bucket_name": "", 
"s3_bucket_region": "us-east-1", "warehouse_location": "s3://your-bucket/path/to/store/files/in"}, "createdAt": 488187, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "postgres", "name": "Analytics Team Postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} Destination Update Response Example: parameters: path: destinationId: "" requestBody: - application/json: {"configuration": {"project_id": ""}, "name": ""} + application/json: {"configuration": {"database": "", "host": "greedy-fisherman.org", "load_type": {"load_type": "INSERT"}, "port": "1433", "schema": "public", "ssl_method": {"name": "unencrypted"}, "user": "charles"}, "name": ""} responses: "200": - application/json: {"configuration": {"project_id": ""}, "createdAt": 946510, "definitionId": "", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "", "name": "running", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} + application/json: {"configuration": {"database": "", "host": "concerned-warming.biz", "load_type": {"load_type": "INSERT"}, "port": "1433", "schema": "public", "ssl_method": {"name": "encrypted_verify_certificate"}, "user": "charles"}, "createdAt": 946510, "definitionId": "", "destinationId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "destinationType": "", "name": "running", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} getHealthCheck: {} cancelJob: speakeasy-default-cancel-job: @@ -4206,16 +4282,16 @@ examples: createSource: Source Creation Request Example: requestBody: - application/json: {"configuration": {"api_key": "", "site_id": "docs.airbyte.com"}, "name": "My Source", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} + application/json: {"configuration": {"access_token": "", "api_key": "", "client_id": "", "plaid_env": "production"}, "name": "My Source", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} responses: "200": - application/json: 
{"configuration": {"api_key": "", "start_date": "2017-01-25T00:00:00Z"}, "createdAt": 218560, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "name": "Analytics Team Postgres", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} + application/json: {"configuration": {"num_workers": 10}, "createdAt": 218560, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "name": "Analytics Team Postgres", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} Source Creation Response Example: requestBody: - application/json: {"configuration": {"access_token": "", "wrike_instance": "app-us2.wrike.com"}, "name": "", "workspaceId": "dc883bf1-95a6-46ef-b9bb-403f120decfe"} + application/json: {"configuration": {"agreement_grant_token": "", "app_secret_token": ""}, "name": "", "workspaceId": "dc883bf1-95a6-46ef-b9bb-403f120decfe"} responses: "200": - application/json: {"configuration": {"agreement_grant_token": "", "app_secret_token": ""}, "createdAt": 341415, "definitionId": "", "name": "", "sourceId": "0c31738c-0b2d-4887-b506-e2cd1c39cc35", "sourceType": "", "workspaceId": ""} + application/json: {"configuration": {"access_token": "", "wrike_instance": "app-us2.wrike.com"}, "createdAt": 341415, "definitionId": "", "name": "", "sourceId": "0c31738c-0b2d-4887-b506-e2cd1c39cc35", "sourceType": "", "workspaceId": ""} deleteSource: speakeasy-default-delete-source: parameters: @@ -4229,7 +4305,7 @@ examples: query: {} responses: "200": - application/json: {"configuration": {"api_key": ""}, "createdAt": 659848, "definitionId": "", "name": "running", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} + application/json: {"configuration": {"hoorayhrpassword": "", "hoorayhrusername": ""}, "createdAt": 659848, "definitionId": "", "name": 
"running", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} initiateOAuth: speakeasy-default-initiate-O-auth: requestBody: @@ -4244,45 +4320,45 @@ examples: offset: 0 responses: "200": - application/json: {"data": [{"configuration": {"access_token": ""}, "createdAt": 855060, "definitionId": "", "name": "Analytics Team Postgres", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"}], "next": "https://api.airbyte.com/v1/sources?limit=5&offset=10", "previous": "https://api.airbyte.com/v1/sources?limit=5&offset=0"} + application/json: {"data": [{"configuration": {"pool_ids": "0,1", "start_ids": "0,0", "url_base": "https://api.kyve.network"}, "createdAt": 461839, "definitionId": "", "name": "Analytics Team Postgres", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"}], "next": "https://api.airbyte.com/v1/sources?limit=5&offset=10", "previous": "https://api.airbyte.com/v1/sources?limit=5&offset=0"} patchSource: Source Update Request Example: parameters: path: sourceId: "" requestBody: - application/json: {"configuration": {"api_key": ""}, "name": "My Source", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} + application/json: {"configuration": {"endpoint": ""}, "name": "My Source", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} responses: "200": - application/json: {"configuration": {"credentials": {"client_id": "", "client_secret": "", "refresh_token": ""}}, "createdAt": 183665, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "name": "Analytics Team Postgres", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} + application/json: {"configuration": {"credentials": {"client_id": "", "client_secret": "", "refresh_token": ""}, 
"endpoint": "", "ignore_missing_read_permissions_tables": false, "region": ""}, "createdAt": 183665, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "name": "Analytics Team Postgres", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} Source Update Response Example: parameters: path: sourceId: "" requestBody: - application/json: {"configuration": {"api_key": "", "sub_domain": ""}, "name": "My source"} + application/json: {"configuration": {"api_key": "", "limit": "10", "start_date": "2023-07-21T07:07:43.436Z", "subdomain": "api"}, "name": "My source"} responses: "200": - application/json: {"configuration": {"domain_id": "", "secret_key": "", "start_date": "2023-07-30T03:43:59.244Z"}, "createdAt": 291381, "definitionId": "", "name": "running", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} + application/json: {"configuration": {"username": "Orie_Kulas14"}, "createdAt": 291381, "definitionId": "", "name": "running", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} putSource: Source Update Request Example: parameters: path: sourceId: "" requestBody: - application/json: {"configuration": {"api_key": "", "grid_id": ""}, "name": "My Source"} + application/json: {"configuration": {"base_url": "https://amazing-basket.info", "domain": "whimsical-overheard.com", "username": "Euna.Hickle"}, "name": "My Source"} responses: "200": - application/json: {"configuration": {"email": "Annabell_Williamson@gmail.com", "password": "1CuU7W6lTkgQ_XF"}, "createdAt": 22579, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "name": "Analytics Team Postgres", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} + application/json: {"configuration": 
{"x-api-key": ""}, "createdAt": 22579, "definitionId": "321d9b60-11d1-44cb-8c92-c246d53bf98e", "name": "Analytics Team Postgres", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "871d9b60-11d1-44cb-8c92-c246d53bf87e"} Source Update Response Example: parameters: path: sourceId: "" requestBody: - application/json: {"configuration": {"api_key": "", "user_email": ""}, "name": ""} + application/json: {"configuration": {"access_token": "", "gocardless_environment": "sandbox", "gocardless_version": "", "start_date": "2017-01-25T00:00:00Z"}, "name": ""} responses: "200": - application/json: {"configuration": {"api_key": "", "start_date": "2024-12-28T19:32:08.794Z"}, "createdAt": 142182, "definitionId": "", "name": "running", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} + application/json: {"configuration": {"app_id": "", "base": "USD", "start_date": "YYYY-MM-DD"}, "createdAt": 142182, "definitionId": "", "name": "running", "sourceId": "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826", "sourceType": "postgres", "workspaceId": "744cc0ed-7f05-4949-9e60-2a814f90c035"} getStreamProperties: speakeasy-default-get-stream-properties: parameters: diff --git a/.speakeasy/workflow.lock b/.speakeasy/workflow.lock index 48f3f343..0b6f7d1a 100644 --- a/.speakeasy/workflow.lock +++ b/.speakeasy/workflow.lock @@ -2,20 +2,20 @@ speakeasyVersion: 1.508.0 sources: my-source: sourceNamespace: my-source - sourceRevisionDigest: sha256:1d2f15b9c790a784932030450e0ebac32bef1bd690cd86c1d7f7968c1accb931 - sourceBlobDigest: sha256:d0a881322fa4de4a316a25d0c5504263e8a3fc55d31d825e47a6c8de61d9641a + sourceRevisionDigest: sha256:50ca51dd93e5d4e09d2983a052584fda95e21fdf76369f14c63d71ec172f1477 + sourceBlobDigest: sha256:3b1e37edc32673ad701c47f567c916438b6456d4f034d8f0c4fd6fc8fed10e75 tags: - latest - - speakeasy-sdk-regen-1759191606 + - speakeasy-sdk-regen-1761870041 - 1.0.0 targets: 
python-api: source: my-source sourceNamespace: my-source - sourceRevisionDigest: sha256:1d2f15b9c790a784932030450e0ebac32bef1bd690cd86c1d7f7968c1accb931 - sourceBlobDigest: sha256:d0a881322fa4de4a316a25d0c5504263e8a3fc55d31d825e47a6c8de61d9641a + sourceRevisionDigest: sha256:50ca51dd93e5d4e09d2983a052584fda95e21fdf76369f14c63d71ec172f1477 + sourceBlobDigest: sha256:3b1e37edc32673ad701c47f567c916438b6456d4f034d8f0c4fd6fc8fed10e75 codeSamplesNamespace: my-source-python-code-samples - codeSamplesRevisionDigest: sha256:a9c4fd43ac0bf5205301bb214e4d267ae9955228d7a46614ff0cd5393dc37d8a + codeSamplesRevisionDigest: sha256:6807b4f22827f6d5c01fafa2fe337e92db61dee7be90845f4cb466d2a2549ee2 workflow: workflowVersion: 1.0.0 speakeasyVersion: 1.508.0 diff --git a/RELEASES.md b/RELEASES.md index 8ca595c4..721c7064 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -978,4 +978,14 @@ Based on: ### Generated - [python v0.53.0] . ### Releases -- [PyPI v0.53.0] https://pypi.org/project/airbyte-api/0.53.0 - . \ No newline at end of file +- [PyPI v0.53.0] https://pypi.org/project/airbyte-api/0.53.0 - . + +## 2025-12-11 00:21:58 +### Changes +Based on: +- OpenAPI Doc +- Speakeasy CLI 1.508.0 (2.536.0) https://github.com/speakeasy-api/speakeasy +### Generated +- [python v0.53.1] . +### Releases +- [PyPI v0.53.1] https://pypi.org/project/airbyte-api/0.53.1 - . 
\ No newline at end of file diff --git a/docs/models/acuityscheduling.md b/docs/models/acuityscheduling.md new file mode 100644 index 00000000..a087a3ff --- /dev/null +++ b/docs/models/acuityscheduling.md @@ -0,0 +1,8 @@ +# AcuityScheduling + + +## Values + +| Name | Value | +| ------------------- | ------------------- | +| `ACUITY_SCHEDULING` | acuity-scheduling | \ No newline at end of file diff --git a/docs/models/adobecommercemagento.md b/docs/models/adobecommercemagento.md new file mode 100644 index 00000000..15fd391d --- /dev/null +++ b/docs/models/adobecommercemagento.md @@ -0,0 +1,8 @@ +# AdobeCommerceMagento + + +## Values + +| Name | Value | +| ------------------------ | ------------------------ | +| `ADOBE_COMMERCE_MAGENTO` | adobe-commerce-magento | \ No newline at end of file diff --git a/docs/models/alltypes.md b/docs/models/alltypes.md new file mode 100644 index 00000000..76f21e0c --- /dev/null +++ b/docs/models/alltypes.md @@ -0,0 +1,11 @@ +# AllTypes + +Generates one column of each Airbyte data type. 
+ + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `data_type` | [Optional[models.SourceDatagenSchemasDataType]](../models/sourcedatagenschemasdatatype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/authorizationmethod.md b/docs/models/authorizationmethod.md index 51ad3999..f62e807d 100644 --- a/docs/models/authorizationmethod.md +++ b/docs/models/authorizationmethod.md @@ -1,5 +1,7 @@ # AuthorizationMethod +Determines the type of authentication that should be used. + ## Supported Types @@ -15,9 +17,3 @@ value: models.KeyPairAuthentication = /* values here */ value: models.UsernameAndPassword = /* values here */ ``` -### `models.DestinationSnowflakeOAuth20` - -```python -value: models.DestinationSnowflakeOAuth20 = /* values here */ -``` - diff --git a/docs/models/bearertokenfromoauth2.md b/docs/models/bearertokenfromoauth2.md new file mode 100644 index 00000000..019fd9f4 --- /dev/null +++ b/docs/models/bearertokenfromoauth2.md @@ -0,0 +1,9 @@ +# BearerTokenFromOauth2 + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | +| `bearer_token` | *str* | :heavy_check_mark: | Access token for making authenticated requests; filled after 
complete oauth2 flow. | +| `auth_type` | [models.SourceTicktickSchemasAuthType](../models/sourceticktickschemasauthtype.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/catalogtype.md b/docs/models/catalogtype.md index 7dbfba0d..09902ae6 100644 --- a/docs/models/catalogtype.md +++ b/docs/models/catalogtype.md @@ -1,6 +1,6 @@ # CatalogType -Specifies the type of Iceberg catalog (e.g., NESSIE, GLUE, REST) and its associated configuration. +Specifies the type of Iceberg catalog (e.g., NESSIE, GLUE, REST, POLARIS) and its associated configuration. ## Supported Types @@ -23,3 +23,9 @@ value: models.GlueCatalog = /* values here */ value: models.RestCatalog = /* values here */ ``` +### `models.PolarisCatalog` + +```python +value: models.PolarisCatalog = /* values here */ +``` + diff --git a/docs/models/connectionresponse.md b/docs/models/connectionresponse.md index 3751a9df..982c76fb 100644 --- a/docs/models/connectionresponse.md +++ b/docs/models/connectionresponse.md @@ -20,4 +20,5 @@ Provides details of a single connection. 
| `namespace_definition` | [Optional[models.NamespaceDefinitionEnum]](../models/namespacedefinitionenum.md) | :heavy_minus_sign: | Define the location where the data will be stored in the destination | | `namespace_format` | *Optional[str]* | :heavy_minus_sign: | N/A | | `non_breaking_schema_updates_behavior` | [Optional[models.NonBreakingSchemaUpdatesBehaviorEnum]](../models/nonbreakingschemaupdatesbehaviorenum.md) | :heavy_minus_sign: | Set how Airbyte handles syncs when it detects a non-breaking schema change in the source | -| `prefix` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file +| `prefix` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `status_reason` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/connectionstatusenum.md b/docs/models/connectionstatusenum.md index 58ac60da..c00d16be 100644 --- a/docs/models/connectionstatusenum.md +++ b/docs/models/connectionstatusenum.md @@ -7,4 +7,5 @@ | ------------ | ------------ | | `ACTIVE` | active | | `INACTIVE` | inactive | -| `DEPRECATED` | deprecated | \ No newline at end of file +| `DEPRECATED` | deprecated | +| `LOCKED` | locked | \ No newline at end of file diff --git a/docs/models/customplan.md b/docs/models/customplan.md new file mode 100644 index 00000000..8e94af27 --- /dev/null +++ b/docs/models/customplan.md @@ -0,0 +1,11 @@ +# CustomPlan + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------ | 
------------------------------------------------------------------------------------------------------------------------------------------ | +| `contacts_rate_limit` | *Optional[int]* | :heavy_minus_sign: | Maximum Rate in Limit/minute for contacts list endpoint in Custom Plan | +| `general_rate_limit` | *Optional[int]* | :heavy_minus_sign: | General Maximum Rate in Limit/minute for other endpoints in Custom Plan | +| `plan_type` | [Optional[models.SourceFreshdeskSchemasRateLimitPlanRateLimitPlanPlan]](../models/sourcefreshdeskschemasratelimitplanratelimitplanplan.md) | :heavy_minus_sign: | N/A | +| `tickets_rate_limit` | *Optional[int]* | :heavy_minus_sign: | Maximum Rate in Limit/minute for tickets list endpoint in Custom Plan | \ No newline at end of file diff --git a/docs/models/customreportconfig.md b/docs/models/customreportconfig.md index c5cb309f..2687439d 100644 --- a/docs/models/customreportconfig.md +++ b/docs/models/customreportconfig.md @@ -3,9 +3,10 @@ ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------ | -| `name` | *str* | :heavy_check_mark: | The name of the custom report, this name would be used as stream name | Account Performance | -| `report_columns` | List[*str*] | :heavy_check_mark: | A list of available 
report object columns. You can find it in description of reporting object that you want to add to custom report. | | -| `reporting_object` | [models.ReportingDataObject](../models/reportingdataobject.md) | :heavy_check_mark: | The name of the the object derives from the ReportRequest object. You can find it in Bing Ads Api docs - Reporting API - Reporting Data Objects. | | -| `report_aggregation` | *Optional[str]* | :heavy_minus_sign: | A list of available aggregations. | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `name` | *str* | :heavy_check_mark: | The name of the custom report, this name would be used as stream name | Account Performance | +| `report_columns` | List[*str*] | :heavy_check_mark: | A list of available report object columns. You can find it in description of reporting object that you want to add to custom report. | | +| `reporting_object` | [models.ReportingDataObject](../models/reportingdataobject.md) | :heavy_check_mark: | The name of the the object derives from the ReportRequest object. You can find it in Bing Ads Api docs - Reporting API - Reporting Data Objects. | | +| `disable_custom_report_names_camel_to_snake_conversion` | *Optional[bool]* | :heavy_minus_sign: | When enabled, disables the automatic conversion of custom report names from camelCase to snake_case. By default, custom report names are automatically converted (e.g., 'MyCustomReport' becomes 'my_custom_report'). Enable this option if you want to use the exact report names you specify. | | +| `report_aggregation` | *Optional[str]* | :heavy_minus_sign: | A list of available aggregations. 
| | \ No newline at end of file diff --git a/docs/models/datagen.md b/docs/models/datagen.md new file mode 100644 index 00000000..87586f62 --- /dev/null +++ b/docs/models/datagen.md @@ -0,0 +1,8 @@ +# Datagen + + +## Values + +| Name | Value | +| --------- | --------- | +| `DATAGEN` | datagen | \ No newline at end of file diff --git a/docs/models/datagenerationtype.md b/docs/models/datagenerationtype.md new file mode 100644 index 00000000..c6cb0687 --- /dev/null +++ b/docs/models/datagenerationtype.md @@ -0,0 +1,19 @@ +# DataGenerationType + +Different patterns for generating data + + +## Supported Types + +### `models.Incremental` + +```python +value: models.Incremental = /* values here */ +``` + +### `models.AllTypes` + +```python +value: models.AllTypes = /* values here */ +``` + diff --git a/docs/models/db2enterprise.md b/docs/models/db2enterprise.md new file mode 100644 index 00000000..d26ef55e --- /dev/null +++ b/docs/models/db2enterprise.md @@ -0,0 +1,8 @@ +# Db2Enterprise + + +## Values + +| Name | Value | +| ---------------- | ---------------- | +| `DB2_ENTERPRISE` | db2-enterprise | \ No newline at end of file diff --git a/docs/models/defillama.md b/docs/models/defillama.md new file mode 100644 index 00000000..bb30cbdf --- /dev/null +++ b/docs/models/defillama.md @@ -0,0 +1,8 @@ +# Defillama + + +## Values + +| Name | Value | +| ----------- | ----------- | +| `DEFILLAMA` | defillama | \ No newline at end of file diff --git a/docs/models/destinationazureblobstorage.md b/docs/models/destinationazureblobstorage.md index 04bbc820..ad765ed9 100644 --- a/docs/models/destinationazureblobstorage.md +++ b/docs/models/destinationazureblobstorage.md @@ -3,13 +3,16 @@ ## Fields -| Field | Type | Required | Description | Example | -| 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `azure_blob_storage_account_name` | *str* | :heavy_check_mark: | The name of the Azure Blob Storage Account. Read more here. 
| mystorageaccount | -| `azure_blob_storage_container_name` | *str* | :heavy_check_mark: | The name of the Azure Blob Storage Container. Read more here. | mycontainer | -| `format` | [models.OutputFormat](../models/outputformat.md) | :heavy_check_mark: | Format of the data output. | | -| `azure_blob_storage_account_key` | *Optional[str]* | :heavy_minus_sign: | The Azure blob storage account key. If you set this value, you must not set the Shared Access Signature. | Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd== | -| `azure_blob_storage_endpoint_domain_name` | *Optional[str]* | :heavy_minus_sign: | This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example. | | -| `azure_blob_storage_spill_size` | *Optional[int]* | :heavy_minus_sign: | The amount of megabytes after which the connector should spill the records in a new blob object. Make sure to configure size greater than individual records. Enter 0 if not applicable. | | -| `destination_type` | [models.DestinationAzureBlobStorageAzureBlobStorage](../models/destinationazureblobstorageazureblobstorage.md) | :heavy_check_mark: | N/A | | -| `shared_access_signature` | *Optional[str]* | :heavy_minus_sign: | A shared access signature (SAS) provides secure delegated access to resources in your storage account. Read more here. If you set this value, you must not set the account key. 
| sv=2021-08-06&st=2025-04-11T00%3A00%3A00Z&se=2025-04-12T00%3A00%3A00Z&sr=b&sp=rw&sig=abcdefghijklmnopqrstuvwxyz1234567890%2Fabcdefg%3D | \ No newline at end of file +| Field | Type | Required | Description | Example | +| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `azure_blob_storage_account_name` | *str* | :heavy_check_mark: | The name of the Azure Blob Storage Account. Read more here. | mystorageaccount | +| `azure_blob_storage_container_name` | *str* | :heavy_check_mark: | The name of the Azure Blob Storage Container. Read more here. | mycontainer | +| `format` | [models.OutputFormat](../models/outputformat.md) | :heavy_check_mark: | Format of the data output. | | +| `azure_blob_storage_account_key` | *Optional[str]* | :heavy_minus_sign: | The Azure Blob Storage account key. If you set this value, you must not set the "Shared Access Signature", "Azure Tenant ID", "Azure Client ID", or "Azure Client Secret" fields. | Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd== | +| `azure_blob_storage_endpoint_domain_name` | *Optional[str]* | :heavy_minus_sign: | This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example. 
| | +| `azure_blob_storage_spill_size` | *Optional[int]* | :heavy_minus_sign: | The amount of megabytes after which the connector should spill the records in a new blob object. Make sure to configure size greater than individual records. Enter 0 if not applicable. | | +| `azure_client_id` | *Optional[str]* | :heavy_minus_sign: | The Azure Active Directory (Entra ID) client ID. Required for Entra ID authentication. | 87654321-4321-4321-4321-210987654321 | +| `azure_client_secret` | *Optional[str]* | :heavy_minus_sign: | The Azure Active Directory (Entra ID) client secret. Required for Entra ID authentication. | your-client-secret | +| `azure_tenant_id` | *Optional[str]* | :heavy_minus_sign: | The Azure Active Directory (Entra ID) tenant ID. Required for Entra ID authentication. | 12345678-1234-1234-1234-123456789012 | +| `destination_type` | [models.DestinationAzureBlobStorageAzureBlobStorage](../models/destinationazureblobstorageazureblobstorage.md) | :heavy_check_mark: | N/A | | +| `shared_access_signature` | *Optional[str]* | :heavy_minus_sign: | A shared access signature (SAS) provides secure delegated access to resources in your storage account. Read more here. If you set this value, you must not set the "Azure Blob Storage Account Key", "Azure Tenant ID", "Azure Client ID", or "Azure Client Secret" fields. 
| sv=2021-08-06&st=2025-04-11T00%3A00%3A00Z&se=2025-04-12T00%3A00%3A00Z&sr=b&sp=rw&sig=abcdefghijklmnopqrstuvwxyz1234567890%2Fabcdefg%3D | \ No newline at end of file diff --git a/docs/models/destinationcustomerio.md b/docs/models/destinationcustomerio.md index f31d1d00..779af2e1 100644 --- a/docs/models/destinationcustomerio.md +++ b/docs/models/destinationcustomerio.md @@ -7,4 +7,4 @@ | ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------- | | `credentials` | [models.DestinationCustomerIoCredentials](../models/destinationcustomeriocredentials.md) | :heavy_check_mark: | Enter the site ID and API key to authenticate. | | `destination_type` | [models.CustomerIo](../models/customerio.md) | :heavy_check_mark: | N/A | -| `object_storage_config` | [Optional[models.ObjectStorageConfiguration]](../models/objectstorageconfiguration.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| `object_storage_config` | [Optional[models.ObjectStorageSpec]](../models/objectstoragespec.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinationhubspot.md b/docs/models/destinationhubspot.md index 6e7264df..6fe0a098 100644 --- a/docs/models/destinationhubspot.md +++ b/docs/models/destinationhubspot.md @@ -3,8 +3,8 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- 
| -------------------------------------------------------------------------------------------------------------------------- | -| `credentials` | [models.DestinationHubspotCredentials](../models/destinationhubspotcredentials.md) | :heavy_check_mark: | Choose how to authenticate to HubSpot. | -| `destination_type` | [models.DestinationHubspotHubspot](../models/destinationhubspothubspot.md) | :heavy_check_mark: | N/A | -| `object_storage_config` | [Optional[models.DestinationHubspotObjectStorageConfiguration]](../models/destinationhubspotobjectstorageconfiguration.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | +| `credentials` | [models.DestinationHubspotCredentials](../models/destinationhubspotcredentials.md) | :heavy_check_mark: | Choose how to authenticate to HubSpot. 
| +| `destination_type` | [models.DestinationHubspotHubspot](../models/destinationhubspothubspot.md) | :heavy_check_mark: | N/A | +| `object_storage_config` | [Optional[models.ObjectStorageConfiguration]](../models/objectstorageconfiguration.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinationhubspotobjectstorageconfiguration.md b/docs/models/destinationhubspotobjectstorageconfiguration.md deleted file mode 100644 index c89c576c..00000000 --- a/docs/models/destinationhubspotobjectstorageconfiguration.md +++ /dev/null @@ -1,17 +0,0 @@ -# DestinationHubspotObjectStorageConfiguration - - -## Supported Types - -### `models.DestinationHubspotNone` - -```python -value: models.DestinationHubspotNone = /* values here */ -``` - -### `models.DestinationHubspotS3` - -```python -value: models.DestinationHubspotS3 = /* values here */ -``` - diff --git a/docs/models/destinations3datalake.md b/docs/models/destinations3datalake.md index 7bff6b3e..f1f6ab1e 100644 --- a/docs/models/destinations3datalake.md +++ b/docs/models/destinations3datalake.md @@ -7,7 +7,7 @@ Defines the configurations required to connect to an Iceberg catalog, including | Field | Type | Required | Description | Example | | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `catalog_type` | [models.CatalogType](../models/catalogtype.md) | :heavy_check_mark: | Specifies the type of Iceberg catalog (e.g., NESSIE, GLUE, REST) and its associated configuration. | | +| `catalog_type` | [models.CatalogType](../models/catalogtype.md) | :heavy_check_mark: | Specifies the type of Iceberg catalog (e.g., NESSIE, GLUE, REST, POLARIS) and its associated configuration. | | | `s3_bucket_name` | *str* | :heavy_check_mark: | The name of the S3 bucket that will host the Iceberg data. | | | `s3_bucket_region` | [models.DestinationS3DataLakeS3BucketRegion](../models/destinations3datalakes3bucketregion.md) | :heavy_check_mark: | The region of the S3 bucket. See here for all region codes. | us-east-1 | | `warehouse_location` | *str* | :heavy_check_mark: | The root location of the data warehouse used by the Iceberg catalog. Typically includes a bucket name and path within that bucket. For AWS Glue and Nessie, must include the storage protocol (such as "s3://" for Amazon S3). 
| s3://your-bucket/path/to/store/files/in | diff --git a/docs/models/destinations3datalakeschemascatalogtypecatalogtype.md b/docs/models/destinations3datalakeschemascatalogtypecatalogtype.md index 823504c5..de9dc626 100644 --- a/docs/models/destinations3datalakeschemascatalogtypecatalogtype.md +++ b/docs/models/destinations3datalakeschemascatalogtypecatalogtype.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| -------- | -------- | -| `NESSIE` | NESSIE | \ No newline at end of file +| Name | Value | +| --------- | --------- | +| `POLARIS` | POLARIS | \ No newline at end of file diff --git a/docs/models/destinations3datalakeschemascatalogtypecatalogtypecatalogtype.md b/docs/models/destinations3datalakeschemascatalogtypecatalogtypecatalogtype.md new file mode 100644 index 00000000..d7945e9f --- /dev/null +++ b/docs/models/destinations3datalakeschemascatalogtypecatalogtypecatalogtype.md @@ -0,0 +1,8 @@ +# DestinationS3DataLakeSchemasCatalogTypeCatalogTypeCatalogType + + +## Values + +| Name | Value | +| -------- | -------- | +| `NESSIE` | NESSIE | \ No newline at end of file diff --git a/docs/models/destinationsalesforce.md b/docs/models/destinationsalesforce.md index 6b99b1c2..d70d6bf8 100644 --- a/docs/models/destinationsalesforce.md +++ b/docs/models/destinationsalesforce.md @@ -11,4 +11,4 @@ | `auth_type` | [models.AuthType](../models/authtype.md) | :heavy_check_mark: | N/A | | `destination_type` | [models.DestinationSalesforceSalesforce](../models/destinationsalesforcesalesforce.md) | :heavy_check_mark: | N/A | | `is_sandbox` | *Optional[bool]* | :heavy_minus_sign: | Toggle if you're using a Salesforce Sandbox. 
| -| `object_storage_config` | [Optional[models.DestinationSalesforceObjectStorageConfiguration]](../models/destinationsalesforceobjectstorageconfiguration.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| `object_storage_config` | [Optional[models.DestinationSalesforceObjectStorageSpec]](../models/destinationsalesforceobjectstoragespec.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinationsalesforceobjectstorageconfiguration.md b/docs/models/destinationsalesforceobjectstoragespec.md similarity index 83% rename from docs/models/destinationsalesforceobjectstorageconfiguration.md rename to docs/models/destinationsalesforceobjectstoragespec.md index d72cd7d8..367e1fb7 100644 --- a/docs/models/destinationsalesforceobjectstorageconfiguration.md +++ b/docs/models/destinationsalesforceobjectstoragespec.md @@ -1,4 +1,4 @@ -# DestinationSalesforceObjectStorageConfiguration +# DestinationSalesforceObjectStorageSpec ## Supported Types diff --git a/docs/models/destinationsnowflake.md b/docs/models/destinationsnowflake.md index 0081ac0e..574ff2aa 100644 --- a/docs/models/destinationsnowflake.md +++ b/docs/models/destinationsnowflake.md @@ -11,10 +11,10 @@ | `schema` | *str* | :heavy_check_mark: | Enter the name of the default schema | AIRBYTE_SCHEMA | | `username` | *str* | :heavy_check_mark: | Enter the name of the user you want to use to access the database | AIRBYTE_USER | | `warehouse` | *str* | :heavy_check_mark: | Enter the name of the warehouse that you want to use as a compute cluster | AIRBYTE_WAREHOUSE | -| `credentials` | [Optional[models.AuthorizationMethod]](../models/authorizationmethod.md) | :heavy_minus_sign: | N/A | | +| `cdc_deletion_mode` | [Optional[models.DestinationSnowflakeCDCDeletionMode]](../models/destinationsnowflakecdcdeletionmode.md) | :heavy_minus_sign: | Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. 
leave a tombstone record in the destination). Defaults to hard deletes. | | +| `credentials` | [Optional[models.AuthorizationMethod]](../models/authorizationmethod.md) | :heavy_minus_sign: | Determines the type of authentication that should be used. | | | `destination_type` | [models.Snowflake](../models/snowflake.md) | :heavy_check_mark: | N/A | | -| `disable_type_dedupe` | *Optional[bool]* | :heavy_minus_sign: | Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions | | +| `disable_type_dedupe` | *Optional[bool]* | :heavy_minus_sign: | Write the legacy "raw tables" format, to enable backwards compatibility with older versions of this connector. | | | `jdbc_url_params` | *Optional[str]* | :heavy_minus_sign: | Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3 | | -| `raw_data_schema` | *Optional[str]* | :heavy_minus_sign: | The schema to write raw tables into (default: airbyte_internal) | | -| `retention_period_days` | *Optional[int]* | :heavy_minus_sign: | The number of days of Snowflake Time Travel to enable on the tables. See Snowflake's documentation for more information. Setting a nonzero value will incur increased storage costs in your Snowflake instance. | | -| `use_merge_for_upsert` | *Optional[bool]* | :heavy_minus_sign: | Use MERGE for de-duplication of final tables. This option no effect if Final tables are disabled or Sync mode is not DEDUPE | | \ No newline at end of file +| `raw_data_schema` | *Optional[str]* | :heavy_minus_sign: | Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyte_internal". 
| | +| `retention_period_days` | *Optional[int]* | :heavy_minus_sign: | The number of days of Snowflake Time Travel to enable on the tables. See Snowflake's documentation for more information. Setting a nonzero value will incur increased storage costs in your Snowflake instance. | | \ No newline at end of file diff --git a/docs/models/destinationsnowflakecdcdeletionmode.md b/docs/models/destinationsnowflakecdcdeletionmode.md new file mode 100644 index 00000000..91c4b714 --- /dev/null +++ b/docs/models/destinationsnowflakecdcdeletionmode.md @@ -0,0 +1,11 @@ +# DestinationSnowflakeCDCDeletionMode + +Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. + + +## Values + +| Name | Value | +| ------------- | ------------- | +| `HARD_DELETE` | Hard delete | +| `SOFT_DELETE` | Soft delete | \ No newline at end of file diff --git a/docs/models/destinationsnowflakeoauth20.md b/docs/models/destinationsnowflakeoauth20.md deleted file mode 100644 index 48bea659..00000000 --- a/docs/models/destinationsnowflakeoauth20.md +++ /dev/null @@ -1,12 +0,0 @@ -# DestinationSnowflakeOAuth20 - - -## Fields - -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------ | -| `access_token` | *str* | :heavy_check_mark: | Enter you application's Access Token | -| `refresh_token` | *str* | :heavy_check_mark: | Enter your application's Refresh Token | -| 
`auth_type` | [Optional[models.DestinationSnowflakeSchemasCredentialsAuthType]](../models/destinationsnowflakeschemascredentialsauthtype.md) | :heavy_minus_sign: | N/A | -| `client_id` | *Optional[str]* | :heavy_minus_sign: | Enter your application's Client ID | -| `client_secret` | *Optional[str]* | :heavy_minus_sign: | Enter your application's Client secret | \ No newline at end of file diff --git a/docs/models/destinationsnowflakeschemascredentialsauthtype.md b/docs/models/destinationsnowflakeschemascredentialsauthtype.md deleted file mode 100644 index 6d9682b2..00000000 --- a/docs/models/destinationsnowflakeschemascredentialsauthtype.md +++ /dev/null @@ -1,8 +0,0 @@ -# DestinationSnowflakeSchemasCredentialsAuthType - - -## Values - -| Name | Value | -| ----------- | ----------- | -| `O_AUTH2_0` | OAuth2.0 | \ No newline at end of file diff --git a/docs/models/enterpriseplan.md b/docs/models/enterpriseplan.md new file mode 100644 index 00000000..b9eaf5f5 --- /dev/null +++ b/docs/models/enterpriseplan.md @@ -0,0 +1,11 @@ +# EnterprisePlan + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------- | +| `contacts_rate_limit` | *Optional[int]* | :heavy_minus_sign: | Maximum Rate in Limit/minute for contacts list endpoint in Enterprise Plan | +| `general_rate_limit` | *Optional[int]* | :heavy_minus_sign: | General Maximum Rate in Limit/minute for other endpoints in Enterprise Plan | +| `plan_type` | [Optional[models.SourceFreshdeskSchemasRateLimitPlanPlan]](../models/sourcefreshdeskschemasratelimitplanplan.md) | 
:heavy_minus_sign: | N/A | +| `tickets_rate_limit` | *Optional[int]* | :heavy_minus_sign: | Maximum Rate in Limit/minute for tickets list endpoint in Enterprise Plan | \ No newline at end of file diff --git a/docs/models/excelformat.md b/docs/models/excelformat.md index 2ad91103..bc2cfed7 100644 --- a/docs/models/excelformat.md +++ b/docs/models/excelformat.md @@ -3,6 +3,6 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- | -| `filetype` | [Optional[models.SourceGcsSchemasStreamsFormatFormat6Filetype]](../models/sourcegcsschemasstreamsformatformat6filetype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- | +| `filetype` | [Optional[models.SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype]](../models/sourceazureblobstorageschemasstreamsformatformatfiletype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff 
--git a/docs/models/fieldfiltering.md b/docs/models/fieldfiltering.md new file mode 100644 index 00000000..b7cf0ab7 --- /dev/null +++ b/docs/models/fieldfiltering.md @@ -0,0 +1,8 @@ +# FieldFiltering + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------- | -------------------------------- | -------------------------------- | -------------------------------- | +| `target_field` | *str* | :heavy_check_mark: | The name of the field to filter. | \ No newline at end of file diff --git a/docs/models/format.md b/docs/models/format.md index 9f11e596..0d861406 100644 --- a/docs/models/format.md +++ b/docs/models/format.md @@ -35,3 +35,9 @@ value: models.ParquetFormat = /* values here */ value: models.UnstructuredDocumentFormat = /* values here */ ``` +### `models.ExcelFormat` + +```python +value: models.ExcelFormat = /* values here */ +``` + diff --git a/docs/models/freeplan.md b/docs/models/freeplan.md new file mode 100644 index 00000000..922212c6 --- /dev/null +++ b/docs/models/freeplan.md @@ -0,0 +1,11 @@ +# FreePlan + + +## Fields + +| Field | Type | Required | Description | +| --------------------------------------------------------------------- | --------------------------------------------------------------------- | --------------------------------------------------------------------- | --------------------------------------------------------------------- | +| `contacts_rate_limit` | *Optional[int]* | :heavy_minus_sign: | Maximum Rate in Limit/minute for contacts list endpoint in Free Plan | +| `general_rate_limit` | *Optional[int]* | :heavy_minus_sign: | General Maximum Rate in Limit/minute for other endpoints in Free Plan | +| `plan_type` | [Optional[models.Plan]](../models/plan.md) | :heavy_minus_sign: | N/A | +| `tickets_rate_limit` | *Optional[int]* | :heavy_minus_sign: | Maximum Rate in Limit/minute for tickets list endpoint in Free Plan | \ No newline at end of file diff --git a/docs/models/growthplan.md 
b/docs/models/growthplan.md new file mode 100644 index 00000000..a70db80f --- /dev/null +++ b/docs/models/growthplan.md @@ -0,0 +1,11 @@ +# GrowthPlan + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | +| `contacts_rate_limit` | *Optional[int]* | :heavy_minus_sign: | Maximum Rate in Limit/minute for contacts list endpoint in Growth Plan | +| `general_rate_limit` | *Optional[int]* | :heavy_minus_sign: | General Maximum Rate in Limit/minute for other endpoints in Growth Plan | +| `plan_type` | [Optional[models.SourceFreshdeskPlan]](../models/sourcefreshdeskplan.md) | :heavy_minus_sign: | N/A | +| `tickets_rate_limit` | *Optional[int]* | :heavy_minus_sign: | Maximum Rate in Limit/minute for tickets list endpoint in Growth Plan | \ No newline at end of file diff --git a/docs/models/incremental.md b/docs/models/incremental.md new file mode 100644 index 00000000..cbeda911 --- /dev/null +++ b/docs/models/incremental.md @@ -0,0 +1,11 @@ +# Incremental + +Generates incrementally increasing numerical data for the source. 
+ + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `data_type` | [Optional[models.SourceDatagenDataType]](../models/sourcedatagendatatype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/insightconfig.md b/docs/models/insightconfig.md index 97a8f1fb..4e342fb2 100644 --- a/docs/models/insightconfig.md +++ b/docs/models/insightconfig.md @@ -8,7 +8,7 @@ Config for custom insights | Field | Type | Required | Description | Example | | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `name` | *str* | :heavy_check_mark: | The name value of insight | | -| `action_breakdowns` | List[[models.ValidActionBreakdowns](../models/validactionbreakdowns.md)] | :heavy_minus_sign: | A list of chosen action_breakdowns for action_breakdowns | | +| `action_breakdowns` | List[[models.SourceFacebookMarketingValidActionBreakdowns](../models/sourcefacebookmarketingvalidactionbreakdowns.md)] | :heavy_minus_sign: | A list of chosen action_breakdowns 
for action_breakdowns | | | `breakdowns` | List[[models.ValidBreakdowns](../models/validbreakdowns.md)] | :heavy_minus_sign: | A list of chosen breakdowns for breakdowns | | | `end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date until which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data. | 2017-01-26T00:00:00Z | | `fields` | List[[models.SourceFacebookMarketingValidEnums](../models/sourcefacebookmarketingvalidenums.md)] | :heavy_minus_sign: | A list of chosen fields for fields parameter | | diff --git a/docs/models/keypairauthentication.md b/docs/models/keypairauthentication.md index 91f083f9..5ac49d95 100644 --- a/docs/models/keypairauthentication.md +++ b/docs/models/keypairauthentication.md @@ -1,10 +1,13 @@ # KeyPairAuthentication +Configuration details for the Key Pair Authentication. + ## Fields -| Field | Type | Required | Description | -| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `private_key` | *str* | :heavy_check_mark: | RSA Private key to use for Snowflake connection. 
See the docs for more information on how to obtain this key. | -| `auth_type` | [Optional[models.DestinationSnowflakeAuthType]](../models/destinationsnowflakeauthtype.md) | :heavy_minus_sign: | N/A | -| `private_key_password` | *Optional[str]* | :heavy_minus_sign: | Passphrase for private key | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `private_key` | *str* | :heavy_check_mark: | RSA Private key to use for Snowflake connection. See the <a href="https://docs.airbyte.com/integrations/destinations/snowflake">docs</a> for more
information on how to obtain this key. | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `auth_type` | [Optional[models.DestinationSnowflakeAuthType]](../models/destinationsnowflakeauthtype.md) | :heavy_minus_sign: | N/A | +| `private_key_password` | *Optional[str]* | :heavy_minus_sign: | Passphrase for private key | \ No newline at end of file diff --git a/docs/models/mantle.md b/docs/models/mantle.md new file mode 100644 index 00000000..d427b465 --- /dev/null +++ b/docs/models/mantle.md @@ -0,0 +1,8 @@ +# Mantle + + +## Values + +| Name | Value | +| -------- | -------- | +| `MANTLE` | mantle | \ No newline at end of file diff --git a/docs/models/mapperconfiguration.md b/docs/models/mapperconfiguration.md index 90334a35..e6577142 100644 --- a/docs/models/mapperconfiguration.md +++ b/docs/models/mapperconfiguration.md @@ -11,6 +11,12 @@ The values required to configure the mapper. value: models.Hashing = /* values here */ ``` +### `models.FieldFiltering` + +```python +value: models.FieldFiltering = /* values here */ +``` + ### `models.FieldRenaming` ```python diff --git a/docs/models/metricool.md b/docs/models/metricool.md new file mode 100644 index 00000000..c60a7219 --- /dev/null +++ b/docs/models/metricool.md @@ -0,0 +1,8 @@ +# Metricool + + +## Values + +| Name | Value | +| ----------- | ----------- | +| `METRICOOL` | metricool | \ No newline at end of file diff --git a/docs/models/nessiecatalog.md b/docs/models/nessiecatalog.md index 2dabc860..5b78c691 100644 --- a/docs/models/nessiecatalog.md +++ b/docs/models/nessiecatalog.md @@ -11,4 +11,4 @@ Configuration details for connecting to a Nessie-based Iceberg catalog. | `server_uri` | *str* | :heavy_check_mark: | The base URL of the Nessie server used to connect to the Nessie catalog. | | | `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | | `access_token` | *Optional[str]* | :heavy_minus_sign: | Optional token for authentication with the Nessie server. 
| a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY | -| `catalog_type` | [Optional[models.DestinationS3DataLakeSchemasCatalogTypeCatalogType]](../models/destinations3datalakeschemascatalogtypecatalogtype.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file +| `catalog_type` | [Optional[models.DestinationS3DataLakeSchemasCatalogTypeCatalogTypeCatalogType]](../models/destinations3datalakeschemascatalogtypecatalogtypecatalogtype.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/oauth2.md b/docs/models/oauth2.md new file mode 100644 index 00000000..f752e8f0 --- /dev/null +++ b/docs/models/oauth2.md @@ -0,0 +1,11 @@ +# OAuth2 + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `client_id` | *str* | :heavy_check_mark: | The client ID of your Ticktick application. Read more here. | +| `client_secret` | *str* | :heavy_check_mark: | The client secret of your Ticktick application. Read more here. | +| `auth_type` | [models.SourceTicktickAuthType](../models/sourceticktickauthtype.md) | :heavy_check_mark: | N/A | +| `client_access_token` | *Optional[str]* | :heavy_minus_sign: | Access token for making authenticated requests; filled after complete oauth2 flow.
| \ No newline at end of file diff --git a/docs/models/oauthactornames.md b/docs/models/oauthactornames.md index 7c0a3912..e89427a2 100644 --- a/docs/models/oauthactornames.md +++ b/docs/models/oauthactornames.md @@ -41,6 +41,7 @@ | `SMARTSHEETS` | smartsheets | | `SNAPCHAT_MARKETING` | snapchat-marketing | | `SURVEYMONKEY` | surveymonkey | +| `TICKTICK` | ticktick | | `TIKTOK_MARKETING` | tiktok-marketing | | `TRELLO` | trello | | `TYPEFORM` | typeform | diff --git a/docs/models/objectstorageconfiguration.md b/docs/models/objectstorageconfiguration.md index 1d89ee13..cf3f03bc 100644 --- a/docs/models/objectstorageconfiguration.md +++ b/docs/models/objectstorageconfiguration.md @@ -3,15 +3,15 @@ ## Supported Types -### `models.NoneT` +### `models.DestinationHubspotNone` ```python -value: models.NoneT = /* values here */ +value: models.DestinationHubspotNone = /* values here */ ``` -### `models.DestinationCustomerIoS3` +### `models.DestinationHubspotS3` ```python -value: models.DestinationCustomerIoS3 = /* values here */ +value: models.DestinationHubspotS3 = /* values here */ ``` diff --git a/docs/models/objectstoragespec.md b/docs/models/objectstoragespec.md new file mode 100644 index 00000000..1c16906b --- /dev/null +++ b/docs/models/objectstoragespec.md @@ -0,0 +1,17 @@ +# ObjectStorageSpec + + +## Supported Types + +### `models.NoneT` + +```python +value: models.NoneT = /* values here */ +``` + +### `models.DestinationCustomerIoS3` + +```python +value: models.DestinationCustomerIoS3 = /* values here */ +``` + diff --git a/docs/models/outlook.md b/docs/models/outlook.md new file mode 100644 index 00000000..eaa2fa77 --- /dev/null +++ b/docs/models/outlook.md @@ -0,0 +1,8 @@ +# Outlook + + +## Values + +| Name | Value | +| --------- | --------- | +| `OUTLOOK` | outlook | \ No newline at end of file diff --git a/docs/models/raas.md b/docs/models/plan.md similarity index 67% rename from docs/models/raas.md rename to docs/models/plan.md index 43d15a15..fccc8c9b 100644 
--- a/docs/models/raas.md +++ b/docs/models/plan.md @@ -1,8 +1,8 @@ -# Raas +# Plan ## Values | Name | Value | | ------ | ------ | -| `RAAS` | RAAS | \ No newline at end of file +| `FREE` | free | \ No newline at end of file diff --git a/docs/models/polariscatalog.md b/docs/models/polariscatalog.md new file mode 100644 index 00000000..50c1d912 --- /dev/null +++ b/docs/models/polariscatalog.md @@ -0,0 +1,16 @@ +# PolarisCatalog + +Configuration details for connecting to an Apache Polaris-based Iceberg catalog. + + +## Fields + +| Field | Type | Required | Description | Example | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `catalog_name` | *str* | :heavy_check_mark: | The name of the catalog in Polaris. This corresponds to the catalog name created via the Polaris Management API. | | +| `client_id` | *str* | :heavy_check_mark: | The OAuth Client ID for authenticating with the Polaris server. 
| abc123clientid | +| `client_secret` | *str* | :heavy_check_mark: | The OAuth Client Secret for authenticating with the Polaris server. | secretkey123 | +| `namespace` | *str* | :heavy_check_mark: | The Polaris namespace to be used in the Table identifier.
This will ONLY be used if the `Destination Namespace` setting for the connection is set to
`Destination-defined` or `Source-defined` | | +| `server_uri` | *str* | :heavy_check_mark: | The base URL of the Polaris server used to connect to the Polaris catalog. | | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | +| `catalog_type` | [Optional[models.DestinationS3DataLakeSchemasCatalogTypeCatalogType]](../models/destinations3datalakeschemascatalogtypecatalogtype.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/proplan.md b/docs/models/proplan.md new file mode 100644 index 00000000..cd841f75 --- /dev/null +++ b/docs/models/proplan.md @@ -0,0 +1,11 @@ +# ProPlan + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | +| `contacts_rate_limit` | *Optional[int]* | :heavy_minus_sign: | Maximum Rate in Limit/minute for contacts list endpoint in Pro Plan | +| `general_rate_limit` | *Optional[int]* | :heavy_minus_sign: | General Maximum Rate in Limit/minute for other endpoints in Pro Plan | +| `plan_type` | [Optional[models.SourceFreshdeskSchemasPlan]](../models/sourcefreshdeskschemasplan.md) | :heavy_minus_sign: | N/A | +| `tickets_rate_limit` | *Optional[int]* | :heavy_minus_sign: | Maximum Rate in Limit/minute for tickets list endpoint in Pro Plan | \ No newline at end of file diff --git a/docs/models/ratelimitplan.md b/docs/models/ratelimitplan.md new file mode 100644 index 00000000..af2cf545 --- /dev/null +++ b/docs/models/ratelimitplan.md @@ -0,0 +1,37 @@ +# RateLimitPlan + +Rate Limit Plan for API Budget + + +## Supported Types + +### `models.FreePlan` + +```python +value: models.FreePlan = /* values here */ +``` + +### 
`models.GrowthPlan` + +```python +value: models.GrowthPlan = /* values here */ +``` + +### `models.ProPlan` + +```python +value: models.ProPlan = /* values here */ +``` + +### `models.EnterprisePlan` + +```python +value: models.EnterprisePlan = /* values here */ +``` + +### `models.CustomPlan` + +```python +value: models.CustomPlan = /* values here */ +``` + diff --git a/docs/models/readchangesusingchangedatacapturecdc.md b/docs/models/readchangesusingchangedatacapturecdc.md index 806dc7f9..896e7b67 100644 --- a/docs/models/readchangesusingchangedatacapturecdc.md +++ b/docs/models/readchangesusingchangedatacapturecdc.md @@ -1,14 +1,12 @@ # ReadChangesUsingChangeDataCaptureCDC -Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database. +Recommended - Incrementally reads new inserts, updates, and deletes using change data capture feature. This must be enabled on your database. ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `initial_load_timeout_hours` | *Optional[int]* | :heavy_minus_sign: | The amount of time an initial load is allowed to continue for before catching up on CDC logs. | -| `initial_waiting_seconds` | *Optional[int]* | :heavy_minus_sign: | The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 3600 seconds. Read about initial waiting time. | -| `invalid_cdc_cursor_position_behavior` | [Optional[models.SourceMssqlInvalidCDCPositionBehaviorAdvanced]](../models/sourcemssqlinvalidcdcpositionbehavioradvanced.md) | :heavy_minus_sign: | Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value into the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss. | -| `method` | [models.SourceMssqlMethod](../models/sourcemssqlmethod.md) | :heavy_check_mark: | N/A | -| `queue_size` | *Optional[int]* | :heavy_minus_sign: | The size of the internal queue. 
This may interfere with memory consumption and efficiency of the connector, please be careful. | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | +| `cursor_method` | [Optional[models.SourceDb2EnterpriseCursorMethod]](../models/sourcedb2enterprisecursormethod.md) | :heavy_minus_sign: | N/A | +| `initial_load_timeout_hours` | *Optional[int]* | :heavy_minus_sign: | The amount of time an initial load is allowed to continue for before catching up on CDC events. | \ No newline at end of file diff --git a/docs/models/reportbasedstreams.md b/docs/models/reportbasedstreams.md deleted file mode 100644 index 8900adf5..00000000 --- a/docs/models/reportbasedstreams.md +++ /dev/null @@ -1,11 +0,0 @@ -# ReportBasedStreams - - -## Fields - -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `password` | *str* | :heavy_check_mark: | N/A | | -| `report_ids` | List[*Any*] | :heavy_check_mark: | Report IDs can be found by clicking the three dots on the right side of the report > Web Service > View URLs > in JSON url copy everything between Workday tenant/ and ?format=json. | for JSON url https://hostname/ccx/service/customreport2/tenant/report/id?format=json Report ID is report/id. | -| `username` | *str* | :heavy_check_mark: | N/A | | -| `auth_type` | [models.Raas](../models/raas.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/reportids.md b/docs/models/reportids.md new file mode 100644 index 00000000..ba2b76ae --- /dev/null +++ b/docs/models/reportids.md @@ -0,0 +1,8 @@ +# ReportIds + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `report_id` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/rest.md b/docs/models/rest.md deleted file mode 100644 index dfc37a3b..00000000 --- a/docs/models/rest.md +++ /dev/null @@ -1,8 +0,0 @@ -# Rest - - -## Values - -| Name | Value | -| ------ | ------ | -| `REST` | REST | \ No newline at end of file diff --git a/docs/models/restapistreams.md b/docs/models/restapistreams.md deleted file mode 100644 index 3a59fae9..00000000 --- a/docs/models/restapistreams.md +++ /dev/null @@ -1,10 +0,0 @@ -# RESTAPIStreams - - -## Fields - -| Field | Type | Required | Description | Example | -| 
------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | -| `access_token` | *str* | :heavy_check_mark: | Follow the instructions in the "OAuth 2.0 in Postman - API Client for Integrations" article in the Workday community docs to obtain access token. | | -| `auth_type` | [models.Rest](../models/rest.md) | :heavy_check_mark: | N/A | | -| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | Rows after this date will be synced, default 2 years ago. | 2024-10-26T07:00:00.000Z | \ No newline at end of file diff --git a/docs/models/scanchangeswithuserdefinedcursor.md b/docs/models/scanchangeswithuserdefinedcursor.md index 52e07e43..f066c76f 100644 --- a/docs/models/scanchangeswithuserdefinedcursor.md +++ b/docs/models/scanchangeswithuserdefinedcursor.md @@ -5,7 +5,7 @@ Incrementally detects new inserts and updates using the docs for more information on how to obtain this key. | | -| `domain` | *str* | :heavy_check_mark: | Used to construct Base URL for the Freshcaller APIs | snaptravel | -| `requests_per_minute` | *Optional[int]* | :heavy_minus_sign: | The number of requests per minute that this source allowed to use. There is a rate limit of 50 requests per minute per app per account. 
| | -| `source_type` | [models.Freshcaller](../models/freshcaller.md) | :heavy_check_mark: | N/A | | -| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time. Any data created after this date will be replicated. | 2022-01-01T12:00:00Z | -| `sync_lag_minutes` | *Optional[int]* | :heavy_minus_sign: | Lag in minutes for each sync, i.e., at time T, data for the time range [prev_sync_time, T-30] will be fetched | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| --------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Freshcaller API Key. See the docs for more information on how to obtain this key. | | +| `domain` | *str* | :heavy_check_mark: | Used to construct Base URL for the Freshcaller APIs | snaptravel | +| `requests_per_minute` | *Optional[int]* | :heavy_minus_sign: | The number of requests per minute that this source allowed to use. There is a rate limit of 50 requests per minute per app per account. | | +| `source_type` | [models.Freshcaller](../models/freshcaller.md) | :heavy_check_mark: | N/A | | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time. Any data created after this date will be replicated. 
| 2022-01-01T12:00:00Z | +| `sync_lag_minutes` | *Optional[int]* | :heavy_minus_sign: | Lag in minutes for each sync, i.e., at time T, data for the time range [prev_sync_time, T-30] will be fetched | | \ No newline at end of file diff --git a/docs/models/sourcefreshdesk.md b/docs/models/sourcefreshdesk.md index 0e3bd68e..b1e6cc2e 100644 --- a/docs/models/sourcefreshdesk.md +++ b/docs/models/sourcefreshdesk.md @@ -8,6 +8,7 @@ | `api_key` | *str* | :heavy_check_mark: | Freshdesk API Key. See the docs for more information on how to obtain this key. | | | `domain` | *str* | :heavy_check_mark: | Freshdesk domain | myaccount.freshdesk.com | | `lookback_window_in_days` | *Optional[int]* | :heavy_minus_sign: | Number of days for lookback window for the stream Satisfaction Ratings | | +| `rate_limit_plan` | [Optional[models.RateLimitPlan]](../models/ratelimitplan.md) | :heavy_minus_sign: | Rate Limit Plan for API Budget | | | `requests_per_minute` | *Optional[int]* | :heavy_minus_sign: | The number of requests per minute that this source allowed to use. There is a rate limit of 50 requests per minute per app per account. | | | `source_type` | [models.Freshdesk](../models/freshdesk.md) | :heavy_check_mark: | N/A | | | `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time. Any data created after this date will be replicated. If this parameter is not set, all data will be replicated. 
| 2020-12-01T00:00:00Z | \ No newline at end of file diff --git a/docs/models/sourcefreshdeskplan.md b/docs/models/sourcefreshdeskplan.md new file mode 100644 index 00000000..4111cb1c --- /dev/null +++ b/docs/models/sourcefreshdeskplan.md @@ -0,0 +1,8 @@ +# SourceFreshdeskPlan + + +## Values + +| Name | Value | +| -------- | -------- | +| `GROWTH` | growth | \ No newline at end of file diff --git a/docs/models/sourcefreshdeskschemasplan.md b/docs/models/sourcefreshdeskschemasplan.md new file mode 100644 index 00000000..c8642ff4 --- /dev/null +++ b/docs/models/sourcefreshdeskschemasplan.md @@ -0,0 +1,8 @@ +# SourceFreshdeskSchemasPlan + + +## Values + +| Name | Value | +| ----- | ----- | +| `PRO` | pro | \ No newline at end of file diff --git a/docs/models/sourcefreshdeskschemasratelimitplanplan.md b/docs/models/sourcefreshdeskschemasratelimitplanplan.md new file mode 100644 index 00000000..9ec4963c --- /dev/null +++ b/docs/models/sourcefreshdeskschemasratelimitplanplan.md @@ -0,0 +1,8 @@ +# SourceFreshdeskSchemasRateLimitPlanPlan + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `ENTERPRISE` | enterprise | \ No newline at end of file diff --git a/docs/models/sourcefreshdeskschemasratelimitplanratelimitplanplan.md b/docs/models/sourcefreshdeskschemasratelimitplanratelimitplanplan.md new file mode 100644 index 00000000..58ff343d --- /dev/null +++ b/docs/models/sourcefreshdeskschemasratelimitplanratelimitplanplan.md @@ -0,0 +1,8 @@ +# SourceFreshdeskSchemasRateLimitPlanRateLimitPlanPlan + + +## Values + +| Name | Value | +| -------- | -------- | +| `CUSTOM` | custom | \ No newline at end of file diff --git a/docs/models/sourcegcsexcelformat.md b/docs/models/sourcegcsexcelformat.md new file mode 100644 index 00000000..3550de85 --- /dev/null +++ b/docs/models/sourcegcsexcelformat.md @@ -0,0 +1,8 @@ +# SourceGcsExcelFormat + + +## Fields + +| Field | Type | Required | Description | +| 
-------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- | +| `filetype` | [Optional[models.SourceGcsSchemasStreamsFormatFormat6Filetype]](../models/sourcegcsschemasstreamsformatformat6filetype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcegcsformat.md b/docs/models/sourcegcsformat.md index 7a4a6199..add1c4d4 100644 --- a/docs/models/sourcegcsformat.md +++ b/docs/models/sourcegcsformat.md @@ -35,9 +35,9 @@ value: models.SourceGcsParquetFormat = /* values here */ value: models.SourceGcsUnstructuredDocumentFormat = /* values here */ ``` -### `models.ExcelFormat` +### `models.SourceGcsExcelFormat` ```python -value: models.ExcelFormat = /* values here */ +value: models.SourceGcsExcelFormat = /* values here */ ``` diff --git a/docs/models/sourcemantle.md b/docs/models/sourcemantle.md new file mode 100644 index 00000000..9d7de36f --- /dev/null +++ b/docs/models/sourcemantle.md @@ -0,0 +1,10 @@ +# SourceMantle + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.Mantle](../models/mantle.md) | :heavy_check_mark: | N/A 
| \ No newline at end of file diff --git a/docs/models/sourcemetricool.md b/docs/models/sourcemetricool.md new file mode 100644 index 00000000..fb728aad --- /dev/null +++ b/docs/models/sourcemetricool.md @@ -0,0 +1,13 @@ +# SourceMetricool + + +## Fields + +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | +| `blog_ids` | List[*Any*] | :heavy_check_mark: | Brand IDs | +| `user_id` | *str* | :heavy_check_mark: | Account ID | +| `user_token` | *str* | :heavy_check_mark: | User token to authenticate API requests. Find it in the Account Settings menu, API section of your Metricool account. | +| `end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | If not set, defaults to current datetime. | +| `source_type` | [models.Metricool](../models/metricool.md) | :heavy_check_mark: | N/A | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | If not set, defaults to 60 days back. 
If below "End Date", defaults to 1 day before "End Date" | \ No newline at end of file diff --git a/docs/models/sourcemixpanel.md b/docs/models/sourcemixpanel.md index 6e5b03c9..5332dfe6 100644 --- a/docs/models/sourcemixpanel.md +++ b/docs/models/sourcemixpanel.md @@ -3,16 +3,17 @@ ## Fields -| Field | Type | Required | Description | Example | -| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `credentials` | [models.AuthenticationWildcard](../models/authenticationwildcard.md) | :heavy_check_mark: | Choose how to authenticate to Mixpanel | | -| `attribution_window` | *Optional[int]* | :heavy_minus_sign: | A period of time for attributing results to ads and the lookback period after those actions occur during which ad results are counted. Default attribution window is 5 days. 
(This value should be non-negative integer) | | -| `date_window_size` | *Optional[int]* | :heavy_minus_sign: | Defines window size in days, that used to slice through data. You can reduce it, if amount of data in each window is too big for your environment. (This value should be positive integer) | | -| `end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date in the format YYYY-MM-DD. Any data after this date will not be replicated. Left empty to always sync to most recent date | 2021-11-16 | -| `export_lookback_window` | *Optional[int]* | :heavy_minus_sign: | The number of seconds to look back from the last synced timestamp during incremental syncs of the Export stream. This ensures no data is missed due to delays in event recording. Default is 0 seconds. Must be a non-negative integer. | | -| `page_size` | *Optional[int]* | :heavy_minus_sign: | The number of records to fetch per request for the engage stream. Default is 1000. If you are experiencing long sync times with this stream, try increasing this value. | | -| `project_timezone` | *Optional[str]* | :heavy_minus_sign: | Time zone in which integer date times are stored. The project timezone may be found in the project settings in the Mixpanel console. | US/Pacific | -| `region` | [Optional[models.SourceMixpanelRegion]](../models/sourcemixpanelregion.md) | :heavy_minus_sign: | The region of mixpanel domain instance either US or EU. | | -| `select_properties_by_default` | *Optional[bool]* | :heavy_minus_sign: | Setting this config parameter to TRUE ensures that new properties on events and engage records are captured. Otherwise new properties will be ignored. | | -| `source_type` | [models.Mixpanel](../models/mixpanel.md) | :heavy_check_mark: | N/A | | -| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date in the format YYYY-MM-DD. Any data before this date will not be replicated. 
If this option is not set, the connector will replicate data from up to one year ago by default. | 2021-11-16 | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `credentials` | [models.AuthenticationWildcard](../models/authenticationwildcard.md) | :heavy_check_mark: | Choose how to authenticate to Mixpanel | | +| `attribution_window` | *Optional[int]* | :heavy_minus_sign: | A period 
of time for attributing results to ads and the lookback period after those actions occur during which ad results are counted. Default attribution window is 5 days. (This value should be non-negative integer) | | +| `date_window_size` | *Optional[int]* | :heavy_minus_sign: | Defines window size in days, that used to slice through data. You can reduce it, if amount of data in each window is too big for your environment. (This value should be positive integer) | | +| `end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date in the format YYYY-MM-DD. Any data after this date will not be replicated. Left empty to always sync to most recent date | 2021-11-16 | +| `export_lookback_window` | *Optional[int]* | :heavy_minus_sign: | The number of seconds to look back from the last synced timestamp during incremental syncs of the Export stream. This ensures no data is missed due to delays in event recording. Default is 0 seconds. Must be a non-negative integer. | | +| `num_workers` | *Optional[int]* | :heavy_minus_sign: | The number of worker threads to use for the sync. The performance upper boundary is based on the limit of your Mixpanel pricing plan. More info about the rate limit tiers can be found on Mixpanel's API docs. | 1 | +| `page_size` | *Optional[int]* | :heavy_minus_sign: | The number of records to fetch per request for the engage stream. Default is 1000. If you are experiencing long sync times with this stream, try increasing this value. | | +| `project_timezone` | *Optional[str]* | :heavy_minus_sign: | Time zone in which integer date times are stored. The project timezone may be found in the project settings in the Mixpanel console. | US/Pacific | +| `region` | [Optional[models.SourceMixpanelRegion]](../models/sourcemixpanelregion.md) | :heavy_minus_sign: | The region of mixpanel domain instance either US or EU. 
| | +| `select_properties_by_default` | *Optional[bool]* | :heavy_minus_sign: | Setting this config parameter to TRUE ensures that new properties on events and engage records are captured. Otherwise new properties will be ignored. | | +| `source_type` | [models.Mixpanel](../models/mixpanel.md) | :heavy_check_mark: | N/A | | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date in the format YYYY-MM-DD. Any data before this date will not be replicated. If this option is not set, the connector will replicate data from up to one year ago by default. | 2021-11-16 | \ No newline at end of file diff --git a/docs/models/sourcemssql.md b/docs/models/sourcemssql.md index 289ec825..200da579 100644 --- a/docs/models/sourcemssql.md +++ b/docs/models/sourcemssql.md @@ -11,7 +11,7 @@ | `port` | *int* | :heavy_check_mark: | The port of the database. | 1433 | | `username` | *str* | :heavy_check_mark: | The username which is used to access the database. | | | `jdbc_url_params` | *Optional[str]* | :heavy_minus_sign: | Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). | | -| `replication_method` | [Optional[models.UpdateMethod]](../models/updatemethod.md) | :heavy_minus_sign: | Configures how data is extracted from the database. | | +| `replication_method` | [Optional[models.SourceMssqlUpdateMethod]](../models/sourcemssqlupdatemethod.md) | :heavy_minus_sign: | Configures how data is extracted from the database. | | | `schemas` | List[*str*] | :heavy_minus_sign: | The list of schemas to sync from. Defaults to user. Case sensitive. 
| | | `source_type` | [models.SourceMssqlMssql](../models/sourcemssqlmssql.md) | :heavy_check_mark: | N/A | | | `ssl_method` | [Optional[models.SourceMssqlSSLMethod]](../models/sourcemssqlsslmethod.md) | :heavy_minus_sign: | The encryption method which is used when communicating with the database. | | diff --git a/docs/models/sourcemssqlreadchangesusingchangedatacapturecdc.md b/docs/models/sourcemssqlreadchangesusingchangedatacapturecdc.md new file mode 100644 index 00000000..cf50587e --- /dev/null +++ b/docs/models/sourcemssqlreadchangesusingchangedatacapturecdc.md @@ -0,0 +1,14 @@ +# SourceMssqlReadChangesUsingChangeDataCaptureCDC + +Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `initial_load_timeout_hours` | *Optional[int]* | :heavy_minus_sign: | The amount of time an initial load is allowed to continue for before catching up on CDC logs. | +| `initial_waiting_seconds` | *Optional[int]* | :heavy_minus_sign: | The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 3600 seconds. Read about initial waiting time. | +| `invalid_cdc_cursor_position_behavior` | [Optional[models.SourceMssqlInvalidCDCPositionBehaviorAdvanced]](../models/sourcemssqlinvalidcdcpositionbehavioradvanced.md) | :heavy_minus_sign: | Determines whether Airbyte should fail or re-sync data in case of an stale/invalid cursor value into the WAL. If 'Fail sync' is chosen, a user will have to manually reset the connection before being able to continue syncing data. If 'Re-sync data' is chosen, Airbyte will automatically trigger a refresh but could lead to higher cloud costs and data loss. | +| `method` | [models.SourceMssqlMethod](../models/sourcemssqlmethod.md) | :heavy_check_mark: | N/A | +| `queue_size` | *Optional[int]* | :heavy_minus_sign: | The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful. 
| \ No newline at end of file diff --git a/docs/models/sourcemssqlscanchangeswithuserdefinedcursor.md b/docs/models/sourcemssqlscanchangeswithuserdefinedcursor.md new file mode 100644 index 00000000..b68c1916 --- /dev/null +++ b/docs/models/sourcemssqlscanchangeswithuserdefinedcursor.md @@ -0,0 +1,11 @@ +# SourceMssqlScanChangesWithUserDefinedCursor + +Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `exclude_todays_data` | *Optional[bool]* | :heavy_minus_sign: | When enabled incremental syncs using a cursor of a temporal types (date or datetime) will include cursor values only up until last midnight (Advanced) | +| `method` | [models.SourceMssqlSchemasMethod](../models/sourcemssqlschemasmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcemssqlupdatemethod.md b/docs/models/sourcemssqlupdatemethod.md new file mode 100644 index 00000000..5df67115 --- /dev/null +++ b/docs/models/sourcemssqlupdatemethod.md @@ -0,0 +1,19 @@ +# SourceMssqlUpdateMethod + +Configures how data is extracted from the database. 
+ + +## Supported Types + +### `models.SourceMssqlReadChangesUsingChangeDataCaptureCDC` + +```python +value: models.SourceMssqlReadChangesUsingChangeDataCaptureCDC = /* values here */ +``` + +### `models.SourceMssqlScanChangesWithUserDefinedCursor` + +```python +value: models.SourceMssqlScanChangesWithUserDefinedCursor = /* values here */ +``` + diff --git a/docs/models/sourcenetsuiteenterprisecursormethod.md b/docs/models/sourcenetsuiteenterprisecursormethod.md new file mode 100644 index 00000000..997fc294 --- /dev/null +++ b/docs/models/sourcenetsuiteenterprisecursormethod.md @@ -0,0 +1,8 @@ +# SourceNetsuiteEnterpriseCursorMethod + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `USER_DEFINED` | user_defined | \ No newline at end of file diff --git a/docs/models/sourcenetsuiteenterprisescanchangeswithuserdefinedcursor.md b/docs/models/sourcenetsuiteenterprisescanchangeswithuserdefinedcursor.md index aaa487fd..2c0fe464 100644 --- a/docs/models/sourcenetsuiteenterprisescanchangeswithuserdefinedcursor.md +++ b/docs/models/sourcenetsuiteenterprisescanchangeswithuserdefinedcursor.md @@ -5,7 +5,7 @@ Incrementally detects new inserts and updates using the guide to retrieve it. | | | `is_sandbox` | *Optional[bool]* | :heavy_minus_sign: | Whether or not the the app is in a Salesforce sandbox. If you do not know what this, assume it is false. | | -| `page_size` | *Optional[str]* | :heavy_minus_sign: | The maximum number of records to return per request | | | `source_type` | [models.Pardot](../models/pardot.md) | :heavy_check_mark: | N/A | | | `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time in the format 2000-01-01T00:00:00Z. Any data before this date will not be replicated. Defaults to the year Pardot was released. 
| 2021-07-25T00:00:00Z | \ No newline at end of file diff --git a/docs/models/sourcepinterest.md b/docs/models/sourcepinterest.md index b056fc9e..52e42907 100644 --- a/docs/models/sourcepinterest.md +++ b/docs/models/sourcepinterest.md @@ -8,6 +8,7 @@ | `account_id` | *Optional[str]* | :heavy_minus_sign: | The Pinterest account ID you want to fetch data for. This ID must be provided to filter the data for a specific account. | 1234567890 | | `credentials` | [Optional[models.OAuth20]](../models/oauth20.md) | :heavy_minus_sign: | N/A | | | `custom_reports` | List[[models.ReportConfig](../models/reportconfig.md)] | :heavy_minus_sign: | A list which contains ad statistics entries, each entry must have a name and can contains fields, breakdowns or action_breakdowns. Click on "add" to fill this field. | | +| `num_threads` | *Optional[int]* | :heavy_minus_sign: | The number of parallel threads to use for the sync. | 1 | | `source_type` | [Optional[models.SourcePinterestPinterest]](../models/sourcepinterestpinterest.md) | :heavy_minus_sign: | N/A | | | `start_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_minus_sign: | A date in the format YYYY-MM-DD. If you have not set a date, it would be defaulted to latest allowed date by api (89 days from today). | 2022-07-28 | | `status` | List[[models.Status](../models/status.md)] | :heavy_minus_sign: | For the ads, ad_groups, and campaigns streams, specifying a status will filter out records that do not match the specified ones. If a status is not specified, the source will default to records with a status of either ACTIVE or PAUSED. | | \ No newline at end of file diff --git a/docs/models/sourcepostgres.md b/docs/models/sourcepostgres.md index 682e3b0c..ac84b290 100644 --- a/docs/models/sourcepostgres.md +++ b/docs/models/sourcepostgres.md @@ -8,6 +8,9 @@ | `database` | *str* | :heavy_check_mark: | Name of the database. 
| | | `host` | *str* | :heavy_check_mark: | Hostname of the database. | | | `username` | *str* | :heavy_check_mark: | Username to access the database. | | +| `entra_client_id` | *Optional[str]* | :heavy_minus_sign: | If using Entra service principal, the application ID of the service principal | | +| `entra_service_principal_auth` | *Optional[bool]* | :heavy_minus_sign: | Interpret password as a client secret for a Microsoft Entra service principal | | +| `entra_tenant_id` | *Optional[str]* | :heavy_minus_sign: | If using Entra service principal, the ID of the tenant | | | `jdbc_url_params` | *Optional[str]* | :heavy_minus_sign: | Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters. | | | `password` | *Optional[str]* | :heavy_minus_sign: | Password associated with the username. | | | `port` | *Optional[int]* | :heavy_minus_sign: | Port of the database. | 5432 | diff --git a/docs/models/sourcesaphanaenterprise.md b/docs/models/sourcesaphanaenterprise.md index 5f502e26..83e0e40b 100644 --- a/docs/models/sourcesaphanaenterprise.md +++ b/docs/models/sourcesaphanaenterprise.md @@ -13,8 +13,10 @@ | `check_privileges` | *Optional[bool]* | :heavy_minus_sign: | When this feature is enabled, during schema discovery the connector will query each table or view individually to check access privileges and inaccessible tables, views, or columns therein will be removed. In large schemas, this might cause schema discovery to take too long, in which case it might be advisable to disable this feature. | | `checkpoint_target_interval_seconds` | *Optional[int]* | :heavy_minus_sign: | How often (in seconds) a stream should checkpoint, when possible. | | `concurrency` | *Optional[int]* | :heavy_minus_sign: | Maximum number of concurrent queries to the database. 
| +| `database` | *Optional[str]* | :heavy_minus_sign: | The name of the tenant database to connect to. This is required for multi-tenant SAP HANA systems. For single-tenant systems, this can be left empty. | +| `filters` | List[[models.SourceSapHanaEnterpriseTableFilter](../models/sourcesaphanaenterprisetablefilter.md)] | :heavy_minus_sign: | Inclusion filters for table selection per schema. If no filters are specified for a schema, all tables in that schema will be synced. | | `jdbc_url_params` | *Optional[str]* | :heavy_minus_sign: | Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). | | `password` | *Optional[str]* | :heavy_minus_sign: | The password associated with the username. | -| `port` | *Optional[int]* | :heavy_minus_sign: | Port of the database.
SapHana Corporations recommends the following port numbers:
443 - Default listening port for SAP HANA cloud client connections to the listener. | +| `port` | *Optional[int]* | :heavy_minus_sign: | Port of the database.
SAP recommends the following port numbers:
443 - Default listening port for SAP HANA Cloud client connections to the listener. | | `schemas` | List[*str*] | :heavy_minus_sign: | The list of schemas to sync from. Defaults to user. Case sensitive. | | `source_type` | [models.SapHanaEnterprise](../models/saphanaenterprise.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcesaphanaenterprisetablefilter.md b/docs/models/sourcesaphanaenterprisetablefilter.md new file mode 100644 index 00000000..3ccee6da --- /dev/null +++ b/docs/models/sourcesaphanaenterprisetablefilter.md @@ -0,0 +1,12 @@ +# SourceSapHanaEnterpriseTableFilter + +Inclusion filter configuration for table selection per schema. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | +| `schema_name` | *str* | :heavy_check_mark: | The name of the schema to apply this filter to. Should match a schema defined in "Schemas" field above. | +| `table_name_patterns` | List[*str*] | :heavy_check_mark: | List of table name patterns to include from this schema. Each filter should be a SQL LIKE pattern. 
| +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcesharepointenterprise.md b/docs/models/sourcesharepointenterprise.md index d558a1e1..4246b7be 100644 --- a/docs/models/sourcesharepointenterprise.md +++ b/docs/models/sourcesharepointenterprise.md @@ -11,6 +11,7 @@ This class combines the authentication details with additional configuration for | `credentials` | [models.SourceSharepointEnterpriseAuthentication](../models/sourcesharepointenterpriseauthentication.md) | :heavy_check_mark: | Credentials for connecting to the One Drive API | | | `streams` | List[[models.SourceSharepointEnterpriseFileBasedStreamConfig](../models/sourcesharepointenterprisefilebasedstreamconfig.md)] | :heavy_check_mark: | Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table. | | | `delivery_method` | [Optional[models.SourceSharepointEnterpriseDeliveryMethod]](../models/sourcesharepointenterprisedeliverymethod.md) | :heavy_minus_sign: | N/A | | +| `file_contains_query` | List[*str*] | :heavy_minus_sign: | Input additional query to search files. It will make search files step faster if your Sharepoint account has a lot of files and folders. This query text will be used in the request that will look for files which properties contains inserted text. You can use multiple query texts, they will be applied in search request one by one. | | | `folder_path` | *Optional[str]* | :heavy_minus_sign: | Path to a specific folder within the drives to search for files. Leave empty to search all folders of the drives. This does not apply to shared items. 
| | | `search_scope` | [Optional[models.SourceSharepointEnterpriseSearchScope]](../models/sourcesharepointenterprisesearchscope.md) | :heavy_minus_sign: | Specifies the location(s) to search for files. Valid options are 'ACCESSIBLE_DRIVES' for all SharePoint drives the user can access, 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to search both. | | | `site_url` | *Optional[str]* | :heavy_minus_sign: | Url of SharePoint site to search for files. Leave empty to search in the main site. Use 'https://.sharepoint.com/sites/' to iterate over all sites. | | diff --git a/docs/models/sourcesharepointenterpriseauthenticateviamicrosoftoauth.md b/docs/models/sourcesharepointenterpriseauthenticateviamicrosoftoauth.md index 7cd175a3..a6cd043c 100644 --- a/docs/models/sourcesharepointenterpriseauthenticateviamicrosoftoauth.md +++ b/docs/models/sourcesharepointenterpriseauthenticateviamicrosoftoauth.md @@ -6,10 +6,11 @@ This class uses pydantic for data validation and settings management. 
## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------ | -| `client_id` | *str* | :heavy_check_mark: | Client ID of your Microsoft developer application | -| `client_secret` | *str* | :heavy_check_mark: | Client Secret of your Microsoft developer application | -| `tenant_id` | *str* | :heavy_check_mark: | Tenant ID of the Microsoft SharePoint user | -| `auth_type` | [Optional[models.SourceSharepointEnterpriseAuthType]](../models/sourcesharepointenterpriseauthtype.md) | :heavy_minus_sign: | N/A | -| `refresh_token` | *Optional[str]* | :heavy_minus_sign: | Refresh Token of your Microsoft developer application | \ No newline at end of file +| Field | Type | Required | Description | +| --------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `client_id` | *str* | :heavy_check_mark: | Client ID of your Microsoft developer application | +| `client_secret` | *str* | :heavy_check_mark: | Client Secret of your Microsoft developer 
application | +| `tenant_id` | *str* | :heavy_check_mark: | Tenant ID of the Microsoft SharePoint user | +| `auth_type` | [Optional[models.SourceSharepointEnterpriseAuthType]](../models/sourcesharepointenterpriseauthtype.md) | :heavy_minus_sign: | N/A | +| `refresh_token` | *Optional[str]* | :heavy_minus_sign: | Refresh Token of your Microsoft developer application | +| `scopes` | *Optional[str]* | :heavy_minus_sign: | Scopes to request when authorizing. If you want to change scopes after source was created, you need to Re-authenticate to actually apply this change to your access token. | \ No newline at end of file diff --git a/docs/models/sourceslack.md b/docs/models/sourceslack.md index ba00a957..4fa477d6 100644 --- a/docs/models/sourceslack.md +++ b/docs/models/sourceslack.md @@ -10,7 +10,7 @@ | `channel_messages_window_size` | *Optional[int]* | :heavy_minus_sign: | The size (in days) of the date window that will be used while syncing data from the channel messages stream. A smaller window will allow for greater parallelization when syncing records, but can lead to rate limiting errors. | 30 | | `credentials` | [Optional[models.SourceSlackAuthenticationMechanism]](../models/sourceslackauthenticationmechanism.md) | :heavy_minus_sign: | Choose how to authenticate into Slack | | | `include_private_channels` | *Optional[bool]* | :heavy_minus_sign: | Whether to read information from private channels that the bot is already in. If false, only public channels will be read. If true, the bot must be manually added to private channels. | | -| `join_channels` | *Optional[bool]* | :heavy_minus_sign: | Whether to join all channels or to sync data only from channels the bot is already in. If false, you'll need to manually add the bot to all the channels from which you'd like to sync messages. | | +| `join_channels` | *Optional[bool]* | :heavy_minus_sign: | Whether to join all channels or to sync data only from channels the bot is already in. 
If false, you''ll need to manually add the bot to all the channels from which you''d like to sync messages. | | | `lookback_window` | *Optional[int]* | :heavy_minus_sign: | How far into the past to look for messages in threads, default is 0 days | 7 | | `num_workers` | *Optional[int]* | :heavy_minus_sign: | The number of worker threads to use for the sync. | 2 | | `source_type` | [models.SourceSlackSlack](../models/sourceslackslack.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourceticktick.md b/docs/models/sourceticktick.md new file mode 100644 index 00000000..1a80d7aa --- /dev/null +++ b/docs/models/sourceticktick.md @@ -0,0 +1,9 @@ +# SourceTicktick + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | +| `authorization` | [Optional[models.SourceTicktickAuthenticationType]](../models/sourceticktickauthenticationtype.md) | :heavy_minus_sign: | N/A | +| `source_type` | [Optional[models.SourceTicktickTicktick]](../models/sourceticktickticktick.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourceticktickauthenticationtype.md b/docs/models/sourceticktickauthenticationtype.md new file mode 100644 index 00000000..b41a803a --- /dev/null +++ b/docs/models/sourceticktickauthenticationtype.md @@ -0,0 +1,17 @@ +# SourceTicktickAuthenticationType + + +## Supported Types + +### `models.OAuth2` + +```python +value: models.OAuth2 = /* values here */ +``` + +### `models.BearerTokenFromOauth2` + +```python +value: models.BearerTokenFromOauth2 = /* values here */ +``` + diff --git 
a/docs/models/sourceticktickauthtype.md b/docs/models/sourceticktickauthtype.md new file mode 100644 index 00000000..166efdfa --- /dev/null +++ b/docs/models/sourceticktickauthtype.md @@ -0,0 +1,8 @@ +# SourceTicktickAuthType + + +## Values + +| Name | Value | +| ------- | ------- | +| `OAUTH` | Oauth | \ No newline at end of file diff --git a/docs/models/sourceticktickschemasauthtype.md b/docs/models/sourceticktickschemasauthtype.md new file mode 100644 index 00000000..ed617402 --- /dev/null +++ b/docs/models/sourceticktickschemasauthtype.md @@ -0,0 +1,8 @@ +# SourceTicktickSchemasAuthType + + +## Values + +| Name | Value | +| ------- | ------- | +| `TOKEN` | Token | \ No newline at end of file diff --git a/docs/models/sourceticktickticktick.md b/docs/models/sourceticktickticktick.md new file mode 100644 index 00000000..6f3138c8 --- /dev/null +++ b/docs/models/sourceticktickticktick.md @@ -0,0 +1,8 @@ +# SourceTicktickTicktick + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `TICKTICK` | ticktick | \ No newline at end of file diff --git a/docs/models/sourcetwilio.md b/docs/models/sourcetwilio.md index 8ad39708..c33b2868 100644 --- a/docs/models/sourcetwilio.md +++ b/docs/models/sourcetwilio.md @@ -9,4 +9,5 @@ | `auth_token` | *str* | :heavy_check_mark: | Twilio Auth Token. | | | `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | UTC date and time in the format 2020-10-01T00:00:00Z. Any data before this date will not be replicated. | 2020-10-01T00:00:00Z | | `lookback_window` | *Optional[int]* | :heavy_minus_sign: | How far into the past to look for records. (in minutes) | 60 | +| `num_worker` | *Optional[int]* | :heavy_minus_sign: | The number of worker threads to use for the sync. 
| 1 | | `source_type` | [models.Twilio](../models/twilio.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourceuptick.md b/docs/models/sourceuptick.md index ca7eae1e..67bc09fb 100644 --- a/docs/models/sourceuptick.md +++ b/docs/models/sourceuptick.md @@ -3,14 +3,11 @@ ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------- | -| `base_url` | *str* | :heavy_check_mark: | Ex: https://demo-fire.onuptick.com/ | -| `client_id` | *str* | :heavy_check_mark: | N/A | -| `client_refresh_token` | *str* | :heavy_check_mark: | N/A | -| `client_secret` | *str* | :heavy_check_mark: | N/A | -| `end_date` | *Optional[str]* | :heavy_minus_sign: | Fetch data up until this date | -| `oauth_access_token` | *Optional[str]* | :heavy_minus_sign: | The current access token. This field might be overridden by the connector based on the token refresh endpoint response. | -| `oauth_token_expiry_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date the current access token expires in. This field might be overridden by the connector based on the token refresh endpoint response. 
| -| `source_type` | [models.Uptick](../models/uptick.md) | :heavy_check_mark: | N/A | -| `start_date` | *Optional[str]* | :heavy_minus_sign: | Fetch data starting from this date (by default 2025-01-01) | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------ | ------------------------------------------------------ | ------------------------------------------------------ | ------------------------------------------------------ | +| `base_url` | *str* | :heavy_check_mark: | eg. https://demo-fire.onuptick.com (no trailing slash) | +| `client_id` | *str* | :heavy_check_mark: | N/A | +| `client_secret` | *str* | :heavy_check_mark: | N/A | +| `password` | *str* | :heavy_check_mark: | N/A | +| `username` | *str* | :heavy_check_mark: | N/A | +| `source_type` | [models.Uptick](../models/uptick.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceworkday.md b/docs/models/sourceworkday.md index d4e594d5..1dd7413d 100644 --- a/docs/models/sourceworkday.md +++ b/docs/models/sourceworkday.md @@ -3,9 +3,11 @@ ## Fields -| Field | Type | Required | Description | -| ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `credentials` | [models.SourceWorkdayAuthentication](../models/sourceworkdayauthentication.md) | :heavy_check_mark: | Report Based Streams and REST API Streams use 
different methods of Authentication. Choose streams type you want to sync and provide needed credentials for them. | -| `host` | *str* | :heavy_check_mark: | N/A | -| `tenant_id` | *str* | :heavy_check_mark: | N/A | -| `source_type` | [models.Workday](../models/workday.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `credentials` | [models.SourceWorkdayAuthentication](../models/sourceworkdayauthentication.md) | :heavy_check_mark: | Credentials for connecting to the Workday (RAAS) API. | | +| `host` | *str* | :heavy_check_mark: | N/A | | +| `report_ids` | List[[models.ReportIds](../models/reportids.md)] | :heavy_check_mark: | Report IDs can be found by clicking the three dots on the right side of the report > Web Service > View URLs > in JSON url copy everything between Workday tenant/ and ?format=json. | for JSON url https://hostname/ccx/service/customreport2/tenant/report/id?format=json Report ID is report/id. 
| +| `tenant_id` | *str* | :heavy_check_mark: | N/A | | +| `num_workers` | *Optional[int]* | :heavy_minus_sign: | The number of worker threads to use for the sync. | 1 | +| `source_type` | [models.Workday](../models/workday.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourceworkdayauthentication.md b/docs/models/sourceworkdayauthentication.md index 106a3f48..79fd857a 100644 --- a/docs/models/sourceworkdayauthentication.md +++ b/docs/models/sourceworkdayauthentication.md @@ -1,19 +1,11 @@ # SourceWorkdayAuthentication -Report Based Streams and REST API Streams use different methods of Authentication. Choose streams type you want to sync and provide needed credentials for them. +Credentials for connecting to the Workday (RAAS) API. -## Supported Types - -### `models.ReportBasedStreams` - -```python -value: models.ReportBasedStreams = /* values here */ -``` - -### `models.RESTAPIStreams` - -```python -value: models.RESTAPIStreams = /* values here */ -``` +## Fields +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `password` | *str* | :heavy_check_mark: | N/A | +| `username` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceworkdayrest.md b/docs/models/sourceworkdayrest.md new file mode 100644 index 00000000..c10ae3a5 --- /dev/null +++ b/docs/models/sourceworkdayrest.md @@ -0,0 +1,13 @@ +# SourceWorkdayRest + + +## Fields + +| Field | Type | Required | Description | Example | +| -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------- | 
-------------------------------------------------------------------------------------- | +| `credentials` | [models.SourceWorkdayRestAuthentication](../models/sourceworkdayrestauthentication.md) | :heavy_check_mark: | Credentials for connecting to the Workday (REST) API. | | +| `host` | *str* | :heavy_check_mark: | N/A | | +| `tenant_id` | *str* | :heavy_check_mark: | N/A | | +| `num_workers` | *Optional[int]* | :heavy_minus_sign: | The number of worker threads to use for the sync. | 1 | +| `source_type` | [models.WorkdayRest](../models/workdayrest.md) | :heavy_check_mark: | N/A | | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | Rows after this date will be synced, default 2 years ago. | 2024-10-26T07:00:00.000Z | \ No newline at end of file diff --git a/docs/models/sourceworkdayrestauthentication.md b/docs/models/sourceworkdayrestauthentication.md new file mode 100644 index 00000000..e1945519 --- /dev/null +++ b/docs/models/sourceworkdayrestauthentication.md @@ -0,0 +1,10 @@ +# SourceWorkdayRestAuthentication + +Credentials for connecting to the Workday (REST) API. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | +| `access_token` | *str* | :heavy_check_mark: | Follow the instructions in the "OAuth 2.0 in Postman - API Client for Integrations" article in the Workday community docs to obtain access token. 
| \ No newline at end of file diff --git a/docs/models/streammappertype.md b/docs/models/streammappertype.md index 535eb1a7..e6056495 100644 --- a/docs/models/streammappertype.md +++ b/docs/models/streammappertype.md @@ -3,9 +3,10 @@ ## Values -| Name | Value | -| ---------------- | ---------------- | -| `HASHING` | hashing | -| `FIELD_RENAMING` | field-renaming | -| `ROW_FILTERING` | row-filtering | -| `ENCRYPTION` | encryption | \ No newline at end of file +| Name | Value | +| ----------------- | ----------------- | +| `HASHING` | hashing | +| `FIELD_RENAMING` | field-renaming | +| `ROW_FILTERING` | row-filtering | +| `ENCRYPTION` | encryption | +| `FIELD_FILTERING` | field-filtering | \ No newline at end of file diff --git a/docs/models/tablefilter.md b/docs/models/tablefilter.md new file mode 100644 index 00000000..e2df9b2a --- /dev/null +++ b/docs/models/tablefilter.md @@ -0,0 +1,12 @@ +# TableFilter + +Inclusion filter configuration for table selection per schema. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------- | +| `schema_name` | *str* | :heavy_check_mark: | The name of the schema to apply this filter to. Should match a schema defined in "Schemas" field above. | +| `table_name_patterns` | List[*str*] | :heavy_check_mark: | List of table name patterns to include from this schema. Should be a SQL LIKE pattern. 
| +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/ticktick.md b/docs/models/ticktick.md new file mode 100644 index 00000000..5dd90e01 --- /dev/null +++ b/docs/models/ticktick.md @@ -0,0 +1,8 @@ +# Ticktick + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | +| `authorization` | [Optional[models.TicktickAuthorization]](../models/ticktickauthorization.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/ticktickauthorization.md b/docs/models/ticktickauthorization.md new file mode 100644 index 00000000..8921c941 --- /dev/null +++ b/docs/models/ticktickauthorization.md @@ -0,0 +1,9 @@ +# TicktickAuthorization + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `client_id` | *Optional[str]* | :heavy_minus_sign: | The client ID of your Ticktick application. Read more here. | +| `client_secret` | *Optional[str]* | :heavy_minus_sign: | The client secret of of your Ticktick application. application. 
Read more here. | \ No newline at end of file diff --git a/docs/models/updatemethod.md b/docs/models/updatemethod.md index 0a6f98d1..ee64e9f8 100644 --- a/docs/models/updatemethod.md +++ b/docs/models/updatemethod.md @@ -5,15 +5,15 @@ Configures how data is extracted from the database. ## Supported Types -### `models.ReadChangesUsingChangeDataCaptureCDC` +### `models.ScanChangesWithUserDefinedCursor` ```python -value: models.ReadChangesUsingChangeDataCaptureCDC = /* values here */ +value: models.ScanChangesWithUserDefinedCursor = /* values here */ ``` -### `models.ScanChangesWithUserDefinedCursor` +### `models.ReadChangesUsingChangeDataCaptureCDC` ```python -value: models.ScanChangesWithUserDefinedCursor = /* values here */ +value: models.ReadChangesUsingChangeDataCaptureCDC = /* values here */ ``` diff --git a/docs/models/usernameandpassword.md b/docs/models/usernameandpassword.md index 3f6f9370..423a81a4 100644 --- a/docs/models/usernameandpassword.md +++ b/docs/models/usernameandpassword.md @@ -1,9 +1,12 @@ # UsernameAndPassword +Configuration details for the Username and Password Authentication. + ## Fields | Field | Type | Required | Description | | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- | | `password` | *str* | :heavy_check_mark: | Enter the password associated with the username. 
| +| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | | `auth_type` | [Optional[models.DestinationSnowflakeSchemasAuthType]](../models/destinationsnowflakeschemasauthtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/validbreakdowns.md b/docs/models/validbreakdowns.md index 654d6ac7..1bef9d70 100644 --- a/docs/models/validbreakdowns.md +++ b/docs/models/validbreakdowns.md @@ -7,26 +7,36 @@ An enumeration. | Name | Value | | ------------------------------------------------- | ------------------------------------------------- | +| `AD_EXTENSION_DOMAIN` | ad_extension_domain | +| `AD_EXTENSION_URL` | ad_extension_url | | `AD_FORMAT_ASSET` | ad_format_asset | | `AGE` | age | | `APP_ID` | app_id | | `BODY_ASSET` | body_asset | +| `BREAKDOWN_AD_OBJECTIVE` | breakdown_ad_objective | | `BREAKDOWN_REPORTING_AD_ID` | breakdown_reporting_ad_id | | `CALL_TO_ACTION_ASSET` | call_to_action_asset | | `COARSE_CONVERSION_VALUE` | coarse_conversion_value | +| `COMSCORE_MARKET` | comscore_market | +| `COMSCORE_MARKET_CODE` | comscore_market_code | | `CONVERSION_DESTINATION` | conversion_destination | | `COUNTRY` | country | +| `CREATIVE_RELAXATION_ASSET_TYPE` | creative_relaxation_asset_type | | `DESCRIPTION_ASSET` | description_asset | | `DEVICE_PLATFORM` | device_platform | | `DMA` | dma | | `FIDELITY_TYPE` | fidelity_type | +| `FLEXIBLE_FORMAT_ASSET_TYPE` | flexible_format_asset_type | | `FREQUENCY_VALUE` | frequency_value | +| `GEN_AI_ASSET_TYPE` | gen_ai_asset_type | | `GENDER` | gender | | `HOURLY_STATS_AGGREGATED_BY_ADVERTISER_TIME_ZONE` | hourly_stats_aggregated_by_advertiser_time_zone | | `HOURLY_STATS_AGGREGATED_BY_AUDIENCE_TIME_ZONE` | hourly_stats_aggregated_by_audience_time_zone | | `HSID` | hsid | | `IMAGE_ASSET` | image_asset | | `IMPRESSION_DEVICE` | impression_device | +| `IMPRESSION_VIEW_TIME_ADVERTISER_HOUR_V2` | impression_view_time_advertiser_hour_v2 | +| `IS_AUTO_ADVANCE` | is_auto_advance | | 
`IS_CONVERSION_ID_MODELED` | is_conversion_id_modeled | | `IS_RENDERED_AS_DELAYED_SKIP_AD` | is_rendered_as_delayed_skip_ad | | `LANDING_DESTINATION` | landing_destination | @@ -61,4 +71,5 @@ An enumeration. | `TITLE_ASSET` | title_asset | | `USER_PERSONA_ID` | user_persona_id | | `USER_PERSONA_NAME` | user_persona_name | -| `VIDEO_ASSET` | video_asset | \ No newline at end of file +| `VIDEO_ASSET` | video_asset | +| `USER_SEGMENT_KEY` | user_segment_key | \ No newline at end of file diff --git a/docs/models/workdayrest.md b/docs/models/workdayrest.md new file mode 100644 index 00000000..65f2eee4 --- /dev/null +++ b/docs/models/workdayrest.md @@ -0,0 +1,8 @@ +# WorkdayRest + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `WORKDAY_REST` | workday-rest | \ No newline at end of file diff --git a/docs/sdks/sources/README.md b/docs/sdks/sources/README.md index 9f737009..55df78c3 100644 --- a/docs/sdks/sources/README.md +++ b/docs/sdks/sources/README.md @@ -34,9 +34,11 @@ s = airbyte_api.AirbyteAPI( res = s.sources.create_source(request=models.SourceCreateRequest( - configuration=models.SourcePlausible( + configuration=models.SourcePlaid( + access_token='', api_key='', - site_id='docs.airbyte.com', + client_id='', + plaid_env=models.PlaidEnvironment.PRODUCTION, ), name='My Source', workspace_id='744cc0ed-7f05-4949-9e60-2a814f90c035', @@ -279,8 +281,10 @@ s = airbyte_api.AirbyteAPI( res = s.sources.patch_source(request=api.PatchSourceRequest( source_id='', source_patch_request=models.SourcePatchRequest( - configuration=models.SourceEncharge( - api_key='', + configuration=models.SourceDynamodb( + endpoint='', + ignore_missing_read_permissions_tables=False, + region=models.SourceDynamodbDynamodbRegion.AP_SOUTH_2, ), name='My Source', workspace_id='744cc0ed-7f05-4949-9e60-2a814f90c035', @@ -332,9 +336,10 @@ s = airbyte_api.AirbyteAPI( res = s.sources.put_source(request=api.PutSourceRequest( source_id='', source_put_request=models.SourcePutRequest( - 
configuration=models.SourceGridly( - api_key='', - grid_id='', + configuration=models.SourceGreythr( + base_url='https://amazing-basket.info', + domain='whimsical-overheard.com', + username='Euna.Hickle', ), name='My Source', ), diff --git a/gen.yaml b/gen.yaml index 5b5b7466..6c4c6821 100644 --- a/gen.yaml +++ b/gen.yaml @@ -13,7 +13,7 @@ generation: oAuth2ClientCredentialsEnabled: true oAuth2PasswordEnabled: false python: - version: 0.53.0 + version: 0.53.1 additionalDependencies: dependencies: {} extraDependencies: diff --git a/setup.py b/setup.py index ea3e63e4..afdb1248 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ setuptools.setup( name='airbyte-api', - version='0.53.0', + version='0.53.1', author='Airbyte', description='Python Client SDK for Airbyte API', url='https://github.com/airbytehq/airbyte-api-python-sdk.git', diff --git a/src/airbyte_api/models/__init__.py b/src/airbyte_api/models/__init__.py index 6833625b..5463cf2a 100644 --- a/src/airbyte_api/models/__init__.py +++ b/src/airbyte_api/models/__init__.py @@ -143,6 +143,8 @@ from .source_100ms import * from .source_7shifts import * from .source_activecampaign import * +from .source_acuity_scheduling import * +from .source_adobe_commerce_magento import * from .source_agilecrm import * from .source_aha import * from .source_airbyte import * @@ -241,8 +243,11 @@ from .source_customer_io import * from .source_customerly import * from .source_datadog import * +from .source_datagen import * from .source_datascope import * +from .source_db2_enterprise import * from .source_dbt import * +from .source_defillama import * from .source_delighted import * from .source_deputy import * from .source_ding_connect import * @@ -403,6 +408,7 @@ from .source_mailjet_sms import * from .source_mailosaur import * from .source_mailtrap import * +from .source_mantle import * from .source_marketo import * from .source_marketstack import * from .source_mendeley import * @@ -410,6 +416,7 @@ from .source_mercado_ads 
import * from .source_merge import * from .source_metabase import * +from .source_metricool import * from .source_microsoft_dataverse import * from .source_microsoft_entra_id import * from .source_microsoft_lists import * @@ -463,6 +470,7 @@ from .source_orb import * from .source_oura import * from .source_outbrain_amplify import * +from .source_outlook import * from .source_outreach import * from .source_oveit import * from .source_pabbly_subscriptions_billing import * @@ -609,6 +617,7 @@ from .source_thrive_learning import * from .source_ticketmaster import * from .source_tickettailor import * +from .source_ticktick import * from .source_tiktok_marketing import * from .source_timely import * from .source_tinyemail import * @@ -651,6 +660,7 @@ from .source_wordpress import * from .source_workable import * from .source_workday import * +from .source_workday_rest import * from .source_workflowmax import * from .source_workramp import * from .source_wrike import * @@ -702,6 +712,7 @@ from .tagpatchrequest import * from .tagresponse import * from .tagsresponse import * +from .ticktick import * from .tiktok_marketing import * from .typeform import * from .updatedeclarativesourcedefinitionrequest import * diff --git a/src/airbyte_api/models/connectionresponse.py b/src/airbyte_api/models/connectionresponse.py index f9bcac74..cd4f69ab 100644 --- a/src/airbyte_api/models/connectionresponse.py +++ b/src/airbyte_api/models/connectionresponse.py @@ -35,5 +35,6 @@ class ConnectionResponse: non_breaking_schema_updates_behavior: Optional[NonBreakingSchemaUpdatesBehaviorEnum] = dataclasses.field(default=NonBreakingSchemaUpdatesBehaviorEnum.IGNORE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('nonBreakingSchemaUpdatesBehavior'), 'exclude': lambda f: f is None }}) r"""Set how Airbyte handles syncs when it detects a non-breaking schema change in the source""" prefix: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': 
utils.get_field_name('prefix'), 'exclude': lambda f: f is None }}) + status_reason: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('statusReason'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/connectionstatusenum.py b/src/airbyte_api/models/connectionstatusenum.py index 3dbcf270..32173010 100644 --- a/src/airbyte_api/models/connectionstatusenum.py +++ b/src/airbyte_api/models/connectionstatusenum.py @@ -8,3 +8,4 @@ class ConnectionStatusEnum(str, Enum): ACTIVE = 'active' INACTIVE = 'inactive' DEPRECATED = 'deprecated' + LOCKED = 'locked' diff --git a/src/airbyte_api/models/destination_azure_blob_storage.py b/src/airbyte_api/models/destination_azure_blob_storage.py index e307426d..75c02a17 100644 --- a/src/airbyte_api/models/destination_azure_blob_storage.py +++ b/src/airbyte_api/models/destination_azure_blob_storage.py @@ -60,14 +60,20 @@ class DestinationAzureBlobStorage: format: OutputFormat = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format') }}) r"""Format of the data output.""" azure_blob_storage_account_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_account_key'), 'exclude': lambda f: f is None }}) - r"""The Azure blob storage account key. If you set this value, you must not set the Shared Access Signature.""" + r"""The Azure Blob Storage account key. If you set this value, you must not set the \\"Shared Access Signature\\", \\"Azure Tenant ID\\", \\"Azure Client ID\\", or \\"Azure Client Secret\\" fields.""" azure_blob_storage_endpoint_domain_name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_endpoint_domain_name'), 'exclude': lambda f: f is None }}) r"""This is Azure Blob Storage endpoint domain name. 
Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.""" azure_blob_storage_spill_size: Optional[int] = dataclasses.field(default=500, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_blob_storage_spill_size'), 'exclude': lambda f: f is None }}) r"""The amount of megabytes after which the connector should spill the records in a new blob object. Make sure to configure size greater than individual records. Enter 0 if not applicable.""" + azure_client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_client_id'), 'exclude': lambda f: f is None }}) + r"""The Azure Active Directory (Entra ID) client ID. Required for Entra ID authentication.""" + azure_client_secret: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_client_secret'), 'exclude': lambda f: f is None }}) + r"""The Azure Active Directory (Entra ID) client secret. Required for Entra ID authentication.""" + azure_tenant_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('azure_tenant_id'), 'exclude': lambda f: f is None }}) + r"""The Azure Active Directory (Entra ID) tenant ID. Required for Entra ID authentication.""" DESTINATION_TYPE: Final[DestinationAzureBlobStorageAzureBlobStorage] = dataclasses.field(default=DestinationAzureBlobStorageAzureBlobStorage.AZURE_BLOB_STORAGE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) shared_access_signature: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('shared_access_signature'), 'exclude': lambda f: f is None }}) - r"""A shared access signature (SAS) provides secure delegated access to resources in your storage account. Read more here. 
If you set this value, you must not set the account key.""" + r"""A shared access signature (SAS) provides secure delegated access to resources in your storage account. Read more here. If you set this value, you must not set the \\"Azure Blob Storage Account Key\\", \\"Azure Tenant ID\\", \\"Azure Client ID\\", or \\"Azure Client Secret\\" fields.""" diff --git a/src/airbyte_api/models/destination_customer_io.py b/src/airbyte_api/models/destination_customer_io.py index b0966b88..33c24aff 100644 --- a/src/airbyte_api/models/destination_customer_io.py +++ b/src/airbyte_api/models/destination_customer_io.py @@ -109,8 +109,8 @@ class DestinationCustomerIo: credentials: DestinationCustomerIoCredentials = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }}) r"""Enter the site ID and API key to authenticate.""" DESTINATION_TYPE: Final[CustomerIo] = dataclasses.field(default=CustomerIo.CUSTOMER_IO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) - object_storage_config: Optional[ObjectStorageConfiguration] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('object_storage_config'), 'exclude': lambda f: f is None }}) + object_storage_config: Optional[ObjectStorageSpec] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('object_storage_config'), 'exclude': lambda f: f is None }}) -ObjectStorageConfiguration = Union[NoneT, DestinationCustomerIoS3] +ObjectStorageSpec = Union[NoneT, DestinationCustomerIoS3] diff --git a/src/airbyte_api/models/destination_hubspot.py b/src/airbyte_api/models/destination_hubspot.py index 554e6af8..0671b44c 100644 --- a/src/airbyte_api/models/destination_hubspot.py +++ b/src/airbyte_api/models/destination_hubspot.py @@ -115,10 +115,10 @@ class DestinationHubspot: credentials: DestinationHubspotCredentials = dataclasses.field(metadata={'dataclasses_json': 
{ 'letter_case': utils.get_field_name('credentials') }}) r"""Choose how to authenticate to HubSpot.""" DESTINATION_TYPE: Final[DestinationHubspotHubspot] = dataclasses.field(default=DestinationHubspotHubspot.HUBSPOT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) - object_storage_config: Optional[DestinationHubspotObjectStorageConfiguration] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('object_storage_config'), 'exclude': lambda f: f is None }}) + object_storage_config: Optional[ObjectStorageConfiguration] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('object_storage_config'), 'exclude': lambda f: f is None }}) DestinationHubspotCredentials = Union[OAuth] -DestinationHubspotObjectStorageConfiguration = Union[DestinationHubspotNone, DestinationHubspotS3] +ObjectStorageConfiguration = Union[DestinationHubspotNone, DestinationHubspotS3] diff --git a/src/airbyte_api/models/destination_s3_data_lake.py b/src/airbyte_api/models/destination_s3_data_lake.py index 826a069c..649a4de8 100644 --- a/src/airbyte_api/models/destination_s3_data_lake.py +++ b/src/airbyte_api/models/destination_s3_data_lake.py @@ -8,6 +8,33 @@ from typing import Any, Dict, Final, Optional, Union +class DestinationS3DataLakeSchemasCatalogTypeCatalogType(str, Enum): + POLARIS = 'POLARIS' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class PolarisCatalog: + r"""Configuration details for connecting to an Apache Polaris-based Iceberg catalog.""" + catalog_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('catalog_name') }}) + r"""The name of the catalog in Polaris. 
This corresponds to the catalog name created via the Polaris Management API.""" + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + r"""The OAuth Client ID for authenticating with the Polaris server.""" + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + r"""The OAuth Client Secret for authenticating with the Polaris server.""" + namespace: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('namespace') }}) + r"""The Polaris namespace to be used in the Table identifier. + This will ONLY be used if the `Destination Namespace` setting for the connection is set to + `Destination-defined` or `Source-defined` + """ + server_uri: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('server_uri') }}) + r"""The base URL of the Polaris server used to connect to the Polaris catalog.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + catalog_type: Optional[DestinationS3DataLakeSchemasCatalogTypeCatalogType] = dataclasses.field(default=DestinationS3DataLakeSchemasCatalogTypeCatalogType.POLARIS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('catalog_type'), 'exclude': lambda f: f is None }}) + + + + class DestinationS3DataLakeSchemasCatalogType(str, Enum): REST = 'REST' @@ -49,7 +76,7 @@ class GlueCatalog: -class DestinationS3DataLakeSchemasCatalogTypeCatalogType(str, Enum): +class DestinationS3DataLakeSchemasCatalogTypeCatalogTypeCatalogType(str, Enum): NESSIE = 'NESSIE' @@ -67,7 +94,7 @@ class NessieCatalog: additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) access_token: Optional[str] = dataclasses.field(default=None, 
metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token'), 'exclude': lambda f: f is None }}) r"""Optional token for authentication with the Nessie server.""" - catalog_type: Optional[DestinationS3DataLakeSchemasCatalogTypeCatalogType] = dataclasses.field(default=DestinationS3DataLakeSchemasCatalogTypeCatalogType.NESSIE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('catalog_type'), 'exclude': lambda f: f is None }}) + catalog_type: Optional[DestinationS3DataLakeSchemasCatalogTypeCatalogTypeCatalogType] = dataclasses.field(default=DestinationS3DataLakeSchemasCatalogTypeCatalogTypeCatalogType.NESSIE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('catalog_type'), 'exclude': lambda f: f is None }}) @@ -119,7 +146,7 @@ class DestinationS3DataLakeS3BucketRegion(str, Enum): class DestinationS3DataLake: r"""Defines the configurations required to connect to an Iceberg catalog, including warehouse location, main branch name, and catalog type specifics.""" catalog_type: CatalogType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('catalog_type') }}) - r"""Specifies the type of Iceberg catalog (e.g., NESSIE, GLUE, REST) and its associated configuration.""" + r"""Specifies the type of Iceberg catalog (e.g., NESSIE, GLUE, REST, POLARIS) and its associated configuration.""" s3_bucket_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_name') }}) r"""The name of the S3 bucket that will host the Iceberg data.""" s3_bucket_region: DestinationS3DataLakeS3BucketRegion = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('s3_bucket_region') }}) @@ -138,4 +165,4 @@ class DestinationS3DataLake: -CatalogType = Union[NessieCatalog, GlueCatalog, RestCatalog] +CatalogType = Union[NessieCatalog, GlueCatalog, RestCatalog, PolarisCatalog] diff --git 
a/src/airbyte_api/models/destination_salesforce.py b/src/airbyte_api/models/destination_salesforce.py index 971dbab8..dc58e8cd 100644 --- a/src/airbyte_api/models/destination_salesforce.py +++ b/src/airbyte_api/models/destination_salesforce.py @@ -107,8 +107,8 @@ class DestinationSalesforce: DESTINATION_TYPE: Final[DestinationSalesforceSalesforce] = dataclasses.field(default=DestinationSalesforceSalesforce.SALESFORCE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) is_sandbox: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('is_sandbox'), 'exclude': lambda f: f is None }}) r"""Toggle if you're using a Salesforce Sandbox.""" - object_storage_config: Optional[DestinationSalesforceObjectStorageConfiguration] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('object_storage_config'), 'exclude': lambda f: f is None }}) + object_storage_config: Optional[DestinationSalesforceObjectStorageSpec] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('object_storage_config'), 'exclude': lambda f: f is None }}) -DestinationSalesforceObjectStorageConfiguration = Union[DestinationSalesforceNone, DestinationSalesforceS3] +DestinationSalesforceObjectStorageSpec = Union[DestinationSalesforceNone, DestinationSalesforceS3] diff --git a/src/airbyte_api/models/destination_snowflake.py b/src/airbyte_api/models/destination_snowflake.py index cc220519..e3c9f1f9 100644 --- a/src/airbyte_api/models/destination_snowflake.py +++ b/src/airbyte_api/models/destination_snowflake.py @@ -5,27 +5,13 @@ from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json from enum import Enum -from typing import Final, Optional, Union +from typing import Any, Dict, Final, Optional, Union -class DestinationSnowflakeSchemasCredentialsAuthType(str, Enum): - O_AUTH2_0 = 
'OAuth2.0' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class DestinationSnowflakeOAuth20: - access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) - r"""Enter you application's Access Token""" - refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }}) - r"""Enter your application's Refresh Token""" - AUTH_TYPE: Final[Optional[DestinationSnowflakeSchemasCredentialsAuthType]] = dataclasses.field(default=DestinationSnowflakeSchemasCredentialsAuthType.O_AUTH2_0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) - client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id'), 'exclude': lambda f: f is None }}) - r"""Enter your application's Client ID""" - client_secret: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret'), 'exclude': lambda f: f is None }}) - r"""Enter your application's Client secret""" - - +class DestinationSnowflakeCDCDeletionMode(str, Enum): + r"""Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination). 
Defaults to hard deletes.""" + HARD_DELETE = 'Hard delete' + SOFT_DELETE = 'Soft delete' class DestinationSnowflakeSchemasAuthType(str, Enum): @@ -35,9 +21,11 @@ class DestinationSnowflakeSchemasAuthType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class UsernameAndPassword: + r"""Configuration details for the Username and Password Authentication.""" password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }}) r"""Enter the password associated with the username.""" - AUTH_TYPE: Final[Optional[DestinationSnowflakeSchemasAuthType]] = dataclasses.field(default=DestinationSnowflakeSchemasAuthType.USERNAME_AND_PASSWORD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + auth_type: Optional[DestinationSnowflakeSchemasAuthType] = dataclasses.field(default=DestinationSnowflakeSchemasAuthType.USERNAME_AND_PASSWORD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) @@ -49,9 +37,14 @@ class DestinationSnowflakeAuthType(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class KeyPairAuthentication: + r"""Configuration details for the Key Pair Authentication.""" private_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('private_key') }}) - r"""RSA Private key to use for Snowflake connection. 
See the docs for more information on how to obtain this key.""" - AUTH_TYPE: Final[Optional[DestinationSnowflakeAuthType]] = dataclasses.field(default=DestinationSnowflakeAuthType.KEY_PAIR_AUTHENTICATION, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) + r"""RSA Private key to use for Snowflake connection. See the docs for more + information on how to obtain this key. + """ + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + auth_type: Optional[DestinationSnowflakeAuthType] = dataclasses.field(default=DestinationSnowflakeAuthType.KEY_PAIR_AUTHENTICATION, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) private_key_password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('private_key_password'), 'exclude': lambda f: f is None }}) r"""Passphrase for private key""" @@ -77,19 +70,20 @@ class DestinationSnowflake: r"""Enter the name of the user you want to use to access the database""" warehouse: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('warehouse') }}) r"""Enter the name of the warehouse that you want to use as a compute cluster""" + cdc_deletion_mode: Optional[DestinationSnowflakeCDCDeletionMode] = dataclasses.field(default=DestinationSnowflakeCDCDeletionMode.HARD_DELETE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cdc_deletion_mode'), 'exclude': lambda f: f is None }}) + r"""Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination). 
Defaults to hard deletes.""" credentials: Optional[AuthorizationMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) + r"""Determines the type of authentication that should be used.""" DESTINATION_TYPE: Final[Snowflake] = dataclasses.field(default=Snowflake.SNOWFLAKE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) - disable_type_dedupe: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('disable_type_dedupe'), 'exclude': lambda f: f is None }}) - r"""Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions""" + disable_type_dedupe: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('disable_type_dedupe'), 'exclude': lambda f: f is None }}) + r"""Write the legacy \\"raw tables\\" format, to enable backwards compatibility with older versions of this connector.""" jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }}) r"""Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). 
Example: key1=value1&key2=value2&key3=value3""" raw_data_schema: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('raw_data_schema'), 'exclude': lambda f: f is None }}) - r"""The schema to write raw tables into (default: airbyte_internal)""" - retention_period_days: Optional[int] = dataclasses.field(default=1, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('retention_period_days'), 'exclude': lambda f: f is None }}) + r"""Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to \\"airbyte_internal\\".""" + retention_period_days: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('retention_period_days'), 'exclude': lambda f: f is None }}) r"""The number of days of Snowflake Time Travel to enable on the tables. See Snowflake's documentation for more information. Setting a nonzero value will incur increased storage costs in your Snowflake instance.""" - use_merge_for_upsert: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('use_merge_for_upsert'), 'exclude': lambda f: f is None }}) - r"""Use MERGE for de-duplication of final tables. 
This option no effect if Final tables are disabled or Sync mode is not DEDUPE""" -AuthorizationMethod = Union[KeyPairAuthentication, UsernameAndPassword, DestinationSnowflakeOAuth20] +AuthorizationMethod = Union[KeyPairAuthentication, UsernameAndPassword] diff --git a/src/airbyte_api/models/mapperconfiguration.py b/src/airbyte_api/models/mapperconfiguration.py index 8e963b87..f458cdda 100644 --- a/src/airbyte_api/models/mapperconfiguration.py +++ b/src/airbyte_api/models/mapperconfiguration.py @@ -67,6 +67,15 @@ class FieldRenaming: +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class FieldFiltering: + target_field: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('targetField') }}) + r"""The name of the field to filter.""" + + + + class HashingMethod(str, Enum): r"""The hashing algorithm to use.""" MD2 = 'MD2' @@ -92,4 +101,4 @@ class Hashing: Encryption = Union[EncryptionRSA, EncryptionAES] -MapperConfiguration = Union[Hashing, FieldRenaming, RowFiltering, Encryption] +MapperConfiguration = Union[Hashing, FieldFiltering, FieldRenaming, RowFiltering, Encryption] diff --git a/src/airbyte_api/models/oauthactornames.py b/src/airbyte_api/models/oauthactornames.py index 6d31de78..9f7a12fb 100644 --- a/src/airbyte_api/models/oauthactornames.py +++ b/src/airbyte_api/models/oauthactornames.py @@ -41,6 +41,7 @@ class OAuthActorNames(str, Enum): SMARTSHEETS = 'smartsheets' SNAPCHAT_MARKETING = 'snapchat-marketing' SURVEYMONKEY = 'surveymonkey' + TICKTICK = 'ticktick' TIKTOK_MARKETING = 'tiktok-marketing' TRELLO = 'trello' TYPEFORM = 'typeform' diff --git a/src/airbyte_api/models/source_acuity_scheduling.py b/src/airbyte_api/models/source_acuity_scheduling.py new file mode 100644 index 00000000..14100e13 --- /dev/null +++ b/src/airbyte_api/models/source_acuity_scheduling.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class AcuityScheduling(str, Enum): + ACUITY_SCHEDULING = 'acuity-scheduling' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceAcuityScheduling: + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) + SOURCE_TYPE: Final[AcuityScheduling] = dataclasses.field(default=AcuityScheduling.ACUITY_SCHEDULING, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_adobe_commerce_magento.py b/src/airbyte_api/models/source_adobe_commerce_magento.py new file mode 100644 index 00000000..d1d19241 --- /dev/null +++ b/src/airbyte_api/models/source_adobe_commerce_magento.py @@ -0,0 +1,28 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class AdobeCommerceMagento(str, Enum): + ADOBE_COMMERCE_MAGENTO = 'adobe-commerce-magento' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceAdobeCommerceMagento: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + store_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('store_host') }}) + r"""magento.mystore.com""" + api_version: Optional[str] = dataclasses.field(default='V1', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_version'), 'exclude': lambda f: f is None }}) + r"""V1""" + SOURCE_TYPE: Final[AdobeCommerceMagento] = dataclasses.field(default=AdobeCommerceMagento.ADOBE_COMMERCE_MAGENTO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_azure_blob_storage.py b/src/airbyte_api/models/source_azure_blob_storage.py index 8953169d..0a25f5c7 100644 --- a/src/airbyte_api/models/source_azure_blob_storage.py +++ b/src/airbyte_api/models/source_azure_blob_storage.py @@ -66,6 +66,18 @@ class SourceAzureBlobStorageAzureBlobStorage(str, Enum): AZURE_BLOB_STORAGE = 'azure-blob-storage' +class SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype(str, Enum): + EXCEL = 'excel' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class ExcelFormat: + FILETYPE: 
Final[Optional[SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype]] = dataclasses.field(default=SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype.EXCEL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) + + + + class SourceAzureBlobStorageSchemasStreamsFormatFiletype(str, Enum): UNSTRUCTURED = 'unstructured' @@ -280,4 +292,4 @@ class SourceAzureBlobStorage: CSVHeaderDefinition = Union[FromCSV, Autogenerated, UserProvided] -Format = Union[AvroFormat, CSVFormat, JsonlFormat, ParquetFormat, UnstructuredDocumentFormat] +Format = Union[AvroFormat, CSVFormat, JsonlFormat, ParquetFormat, UnstructuredDocumentFormat, ExcelFormat] diff --git a/src/airbyte_api/models/source_bing_ads.py b/src/airbyte_api/models/source_bing_ads.py index 4349fb54..9162b551 100644 --- a/src/airbyte_api/models/source_bing_ads.py +++ b/src/airbyte_api/models/source_bing_ads.py @@ -78,6 +78,8 @@ class CustomReportConfig: r"""A list of available report object columns. You can find it in description of reporting object that you want to add to custom report.""" reporting_object: ReportingDataObject = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('reporting_object') }}) r"""The name of the the object derives from the ReportRequest object. You can find it in Bing Ads Api docs - Reporting API - Reporting Data Objects.""" + disable_custom_report_names_camel_to_snake_conversion: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('disable_custom_report_names_camel_to_snake_conversion'), 'exclude': lambda f: f is None }}) + r"""When enabled, disables the automatic conversion of custom report names from camelCase to snake_case. By default, custom report names are automatically converted (e.g., 'MyCustomReport' becomes 'my_custom_report'). 
Enable this option if you want to use the exact report names you specify.""" report_aggregation: Optional[str] = dataclasses.field(default='[Hourly]', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('report_aggregation'), 'exclude': lambda f: f is None }}) r"""A list of available aggregations.""" diff --git a/src/airbyte_api/models/source_datagen.py b/src/airbyte_api/models/source_datagen.py new file mode 100644 index 00000000..a20991cc --- /dev/null +++ b/src/airbyte_api/models/source_datagen.py @@ -0,0 +1,56 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Any, Dict, Final, Optional, Union + + +class SourceDatagenSchemasDataType(str, Enum): + TYPES = 'types' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class AllTypes: + r"""Generates one column of each Airbyte data type.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + data_type: Optional[SourceDatagenSchemasDataType] = dataclasses.field(default=SourceDatagenSchemasDataType.TYPES, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_type'), 'exclude': lambda f: f is None }}) + + + + +class SourceDatagenDataType(str, Enum): + INCREMENT = 'increment' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class Incremental: + r"""Generates incrementally increasing numerical data for the source.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + data_type: Optional[SourceDatagenDataType] = dataclasses.field(default=SourceDatagenDataType.INCREMENT, metadata={'dataclasses_json': { 'letter_case': 
utils.get_field_name('data_type'), 'exclude': lambda f: f is None }}) + + + + +class Datagen(str, Enum): + DATAGEN = 'datagen' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceDatagen: + flavor: DataGenerationType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flavor') }}) + r"""Different patterns for generating data""" + concurrency: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('concurrency'), 'exclude': lambda f: f is None }}) + r"""Maximum number of concurrent data generators. Leave empty to let Airbyte optimize performance.""" + max_records: Optional[int] = dataclasses.field(default=100, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('max_records'), 'exclude': lambda f: f is None }}) + r"""The number of record messages to emit from this connector. Min 1. Max 100 billion.""" + SOURCE_TYPE: Final[Datagen] = dataclasses.field(default=Datagen.DATAGEN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + + +DataGenerationType = Union[Incremental, AllTypes] diff --git a/src/airbyte_api/models/source_db2_enterprise.py b/src/airbyte_api/models/source_db2_enterprise.py new file mode 100644 index 00000000..fa646ff6 --- /dev/null +++ b/src/airbyte_api/models/source_db2_enterprise.py @@ -0,0 +1,170 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Any, Dict, Final, List, Optional, Union + + +class SourceDb2EnterpriseCursorMethod(str, Enum): + CDC = 'cdc' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class ReadChangesUsingChangeDataCaptureCDC: + r"""Recommended - Incrementally reads new inserts, updates, and deletes using change data capture feature. This must be enabled on your database.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + cursor_method: Optional[SourceDb2EnterpriseCursorMethod] = dataclasses.field(default=SourceDb2EnterpriseCursorMethod.CDC, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cursor_method'), 'exclude': lambda f: f is None }}) + initial_load_timeout_hours: Optional[int] = dataclasses.field(default=8, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('initial_load_timeout_hours'), 'exclude': lambda f: f is None }}) + r"""The amount of time an initial load is allowed to continue for before catching up on CDC events.""" + + + + +class CursorMethod(str, Enum): + USER_DEFINED = 'user_defined' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class ScanChangesWithUserDefinedCursor: + r"""Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. 
created_at, updated_at).""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + cursor_method: Optional[CursorMethod] = dataclasses.field(default=CursorMethod.USER_DEFINED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cursor_method'), 'exclude': lambda f: f is None }}) + + + + +class SourceDb2EnterpriseSchemasEncryptionMethod(str, Enum): + ENCRYPTED_VERIFY_CERTIFICATE = 'encrypted_verify_certificate' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceDb2EnterpriseTLSEncryptedVerifyCertificate: + r"""Verify and use the certificate provided by the server.""" + ssl_certificate: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_certificate') }}) + r"""Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + encryption_method: Optional[SourceDb2EnterpriseSchemasEncryptionMethod] = dataclasses.field(default=SourceDb2EnterpriseSchemasEncryptionMethod.ENCRYPTED_VERIFY_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method'), 'exclude': lambda f: f is None }}) + + + + +class SourceDb2EnterpriseEncryptionMethod(str, Enum): + UNENCRYPTED = 'unencrypted' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceDb2EnterpriseUnencrypted: + r"""Data transfer will not be encrypted.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + encryption_method: Optional[SourceDb2EnterpriseEncryptionMethod] = dataclasses.field(default=SourceDb2EnterpriseEncryptionMethod.UNENCRYPTED, 
metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method'), 'exclude': lambda f: f is None }}) + + + + +class Db2Enterprise(str, Enum): + DB2_ENTERPRISE = 'db2-enterprise' + + +class SourceDb2EnterpriseSchemasTunnelMethodTunnelMethod(str, Enum): + SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceDb2EnterprisePasswordAuthentication: + r"""Connect through a jump server tunnel host using username and password authentication""" + tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }}) + r"""Hostname of the jump server host that allows inbound ssh tunnel.""" + tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }}) + r"""OS-level username for logging into the jump server host""" + tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }}) + r"""OS-level password for logging into the jump server host""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[SourceDb2EnterpriseSchemasTunnelMethodTunnelMethod] = dataclasses.field(default=SourceDb2EnterpriseSchemasTunnelMethodTunnelMethod.SSH_PASSWORD_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) + tunnel_port: Optional[int] = dataclasses.field(default=22, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port'), 'exclude': lambda f: f is None }}) + r"""Port on the proxy/jump server that accepts inbound ssh connections.""" + + + + +class SourceDb2EnterpriseSchemasTunnelMethod(str, Enum): + SSH_KEY_AUTH = 'SSH_KEY_AUTH' + + +@dataclass_json(undefined=Undefined.EXCLUDE) 
+@dataclasses.dataclass +class SourceDb2EnterpriseSSHKeyAuthentication: + r"""Connect through a jump server tunnel host using username and ssh key""" + ssh_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssh_key') }}) + r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )""" + tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }}) + r"""Hostname of the jump server host that allows inbound ssh tunnel.""" + tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }}) + r"""OS-level username for logging into the jump server host""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[SourceDb2EnterpriseSchemasTunnelMethod] = dataclasses.field(default=SourceDb2EnterpriseSchemasTunnelMethod.SSH_KEY_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) + tunnel_port: Optional[int] = dataclasses.field(default=22, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port'), 'exclude': lambda f: f is None }}) + r"""Port on the proxy/jump server that accepts inbound ssh connections.""" + + + + +class SourceDb2EnterpriseTunnelMethod(str, Enum): + NO_TUNNEL = 'NO_TUNNEL' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceDb2EnterpriseNoTunnel: + r"""No ssh tunnel needed to connect to database""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + tunnel_method: Optional[SourceDb2EnterpriseTunnelMethod] = dataclasses.field(default=SourceDb2EnterpriseTunnelMethod.NO_TUNNEL, 
metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) + + + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceDb2Enterprise: + cursor: UpdateMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cursor') }}) + r"""Configures how data is extracted from the database.""" + database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }}) + r"""The database name.""" + encryption: SourceDb2EnterpriseEncryption = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption') }}) + r"""The encryption method with is used when communicating with the database.""" + host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }}) + r"""Hostname of the database.""" + schemas: List[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schemas') }}) + r"""The list of schemas to sync from.""" + tunnel_method: SourceDb2EnterpriseSSHTunnelMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }}) + r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + r"""The username which is used to access the database.""" + check_privileges: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('check_privileges'), 'exclude': lambda f: f is None }}) + r"""When this feature is enabled, during schema discovery the connector will query each table or view individually to check access privileges and inaccessible tables, views, or columns therein will be removed. 
In large schemas, this might cause schema discovery to take too long, in which case it might be advisable to disable this feature.""" + checkpoint_target_interval_seconds: Optional[int] = dataclasses.field(default=300, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('checkpoint_target_interval_seconds'), 'exclude': lambda f: f is None }}) + r"""How often (in seconds) a stream should checkpoint, when possible.""" + concurrency: Optional[int] = dataclasses.field(default=1, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('concurrency'), 'exclude': lambda f: f is None }}) + r"""Maximum number of concurrent queries to the database.""" + jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }}) + r"""Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3).""" + password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) + r"""The password associated with the username.""" + port: Optional[int] = dataclasses.field(default=50000, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port'), 'exclude': lambda f: f is None }}) + r"""Port of the database.""" + SOURCE_TYPE: Final[Db2Enterprise] = dataclasses.field(default=Db2Enterprise.DB2_ENTERPRISE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + + +UpdateMethod = Union[ScanChangesWithUserDefinedCursor, ReadChangesUsingChangeDataCaptureCDC] + +SourceDb2EnterpriseEncryption = Union[SourceDb2EnterpriseUnencrypted, SourceDb2EnterpriseTLSEncryptedVerifyCertificate] + +SourceDb2EnterpriseSSHTunnelMethod = Union[SourceDb2EnterpriseNoTunnel, SourceDb2EnterpriseSSHKeyAuthentication, SourceDb2EnterprisePasswordAuthentication] diff --git a/src/airbyte_api/models/source_defillama.py b/src/airbyte_api/models/source_defillama.py new file mode 100644 index 00000000..08f4812c --- /dev/null +++ b/src/airbyte_api/models/source_defillama.py @@ -0,0 +1,20 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional + + +class Defillama(str, Enum): + DEFILLAMA = 'defillama' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceDefillama: + SOURCE_TYPE: Final[Optional[Defillama]] = dataclasses.field(default=Defillama.DEFILLAMA, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType'), 'exclude': lambda f: f is None }}) + + diff --git a/src/airbyte_api/models/source_facebook_marketing.py b/src/airbyte_api/models/source_facebook_marketing.py index d2a8dcbc..060e8fe5 100644 --- a/src/airbyte_api/models/source_facebook_marketing.py +++ b/src/airbyte_api/models/source_facebook_marketing.py @@ -79,7 +79,7 @@ class AuthenticateViaFacebookMarketingOauth: -class ValidActionBreakdowns(str, Enum): +class SourceFacebookMarketingValidActionBreakdowns(str, Enum): r"""An enumeration.""" ACTION_CANVAS_COMPONENT_NAME = 'action_canvas_component_name' ACTION_CAROUSEL_CARD_ID = 'action_carousel_card_id' @@ -100,26 +100,36 @@ class ValidActionBreakdowns(str, Enum): class ValidBreakdowns(str, Enum): r"""An enumeration.""" + AD_EXTENSION_DOMAIN = 'ad_extension_domain' + AD_EXTENSION_URL = 'ad_extension_url' AD_FORMAT_ASSET = 'ad_format_asset' AGE = 'age' APP_ID = 'app_id' BODY_ASSET = 'body_asset' + BREAKDOWN_AD_OBJECTIVE = 'breakdown_ad_objective' BREAKDOWN_REPORTING_AD_ID = 'breakdown_reporting_ad_id' CALL_TO_ACTION_ASSET = 'call_to_action_asset' COARSE_CONVERSION_VALUE = 'coarse_conversion_value' + COMSCORE_MARKET = 'comscore_market' + COMSCORE_MARKET_CODE = 'comscore_market_code' CONVERSION_DESTINATION = 'conversion_destination' COUNTRY = 'country' + CREATIVE_RELAXATION_ASSET_TYPE = 'creative_relaxation_asset_type' DESCRIPTION_ASSET = 'description_asset' DEVICE_PLATFORM = 'device_platform' DMA = 'dma' FIDELITY_TYPE = 
'fidelity_type' + FLEXIBLE_FORMAT_ASSET_TYPE = 'flexible_format_asset_type' FREQUENCY_VALUE = 'frequency_value' + GEN_AI_ASSET_TYPE = 'gen_ai_asset_type' GENDER = 'gender' HOURLY_STATS_AGGREGATED_BY_ADVERTISER_TIME_ZONE = 'hourly_stats_aggregated_by_advertiser_time_zone' HOURLY_STATS_AGGREGATED_BY_AUDIENCE_TIME_ZONE = 'hourly_stats_aggregated_by_audience_time_zone' HSID = 'hsid' IMAGE_ASSET = 'image_asset' IMPRESSION_DEVICE = 'impression_device' + IMPRESSION_VIEW_TIME_ADVERTISER_HOUR_V2 = 'impression_view_time_advertiser_hour_v2' + IS_AUTO_ADVANCE = 'is_auto_advance' IS_CONVERSION_ID_MODELED = 'is_conversion_id_modeled' IS_RENDERED_AS_DELAYED_SKIP_AD = 'is_rendered_as_delayed_skip_ad' LANDING_DESTINATION = 'landing_destination' @@ -155,6 +165,7 @@ class ValidBreakdowns(str, Enum): USER_PERSONA_ID = 'user_persona_id' USER_PERSONA_NAME = 'user_persona_name' VIDEO_ASSET = 'video_asset' + USER_SEGMENT_KEY = 'user_segment_key' class SourceFacebookMarketingValidEnums(str, Enum): @@ -188,11 +199,30 @@ class SourceFacebookMarketingValidEnums(str, Enum): CATALOG_SEGMENT_VALUE_OMNI_PURCHASE_ROAS = 'catalog_segment_value_omni_purchase_roas' CATALOG_SEGMENT_VALUE_WEBSITE_PURCHASE_ROAS = 'catalog_segment_value_website_purchase_roas' CLICKS = 'clicks' + CONVERSION_LEADS = 'conversion_leads' CONVERSION_RATE_RANKING = 'conversion_rate_ranking' CONVERSION_VALUES = 'conversion_values' CONVERSIONS = 'conversions' + CONVERTED_PRODUCT_APP_CUSTOM_EVENT_FB_MOBILE_PURCHASE = 'converted_product_app_custom_event_fb_mobile_purchase' + CONVERTED_PRODUCT_APP_CUSTOM_EVENT_FB_MOBILE_PURCHASE_VALUE = 'converted_product_app_custom_event_fb_mobile_purchase_value' + CONVERTED_PRODUCT_OFFLINE_PURCHASE = 'converted_product_offline_purchase' + CONVERTED_PRODUCT_OFFLINE_PURCHASE_VALUE = 'converted_product_offline_purchase_value' + CONVERTED_PRODUCT_OMNI_PURCHASE = 'converted_product_omni_purchase' + CONVERTED_PRODUCT_OMNI_PURCHASE_VALUES = 'converted_product_omni_purchase_values' 
CONVERTED_PRODUCT_QUANTITY = 'converted_product_quantity' CONVERTED_PRODUCT_VALUE = 'converted_product_value' + CONVERTED_PRODUCT_WEBSITE_PIXEL_PURCHASE = 'converted_product_website_pixel_purchase' + CONVERTED_PRODUCT_WEBSITE_PIXEL_PURCHASE_VALUE = 'converted_product_website_pixel_purchase_value' + CONVERTED_PROMOTED_PRODUCT_APP_CUSTOM_EVENT_FB_MOBILE_PURCHASE = 'converted_promoted_product_app_custom_event_fb_mobile_purchase' + CONVERTED_PROMOTED_PRODUCT_APP_CUSTOM_EVENT_FB_MOBILE_PURCHASE_VALUE = 'converted_promoted_product_app_custom_event_fb_mobile_purchase_value' + CONVERTED_PROMOTED_PRODUCT_OFFLINE_PURCHASE = 'converted_promoted_product_offline_purchase' + CONVERTED_PROMOTED_PRODUCT_OFFLINE_PURCHASE_VALUE = 'converted_promoted_product_offline_purchase_value' + CONVERTED_PROMOTED_PRODUCT_OMNI_PURCHASE = 'converted_promoted_product_omni_purchase' + CONVERTED_PROMOTED_PRODUCT_OMNI_PURCHASE_VALUES = 'converted_promoted_product_omni_purchase_values' + CONVERTED_PROMOTED_PRODUCT_QUANTITY = 'converted_promoted_product_quantity' + CONVERTED_PROMOTED_PRODUCT_VALUE = 'converted_promoted_product_value' + CONVERTED_PROMOTED_PRODUCT_WEBSITE_PIXEL_PURCHASE = 'converted_promoted_product_website_pixel_purchase' + CONVERTED_PROMOTED_PRODUCT_WEBSITE_PIXEL_PURCHASE_VALUE = 'converted_promoted_product_website_pixel_purchase_value' COST_PER_15_SEC_VIDEO_VIEW = 'cost_per_15_sec_video_view' COST_PER_2_SEC_CONTINUOUS_VIDEO_VIEW = 'cost_per_2_sec_continuous_video_view' COST_PER_ACTION_TYPE = 'cost_per_action_type' @@ -202,8 +232,10 @@ class SourceFacebookMarketingValidEnums(str, Enum): COST_PER_ESTIMATED_AD_RECALLERS = 'cost_per_estimated_ad_recallers' COST_PER_INLINE_LINK_CLICK = 'cost_per_inline_link_click' COST_PER_INLINE_POST_ENGAGEMENT = 'cost_per_inline_post_engagement' + COST_PER_OBJECTIVE_RESULT = 'cost_per_objective_result' COST_PER_ONE_THOUSAND_AD_IMPRESSION = 'cost_per_one_thousand_ad_impression' COST_PER_OUTBOUND_CLICK = 'cost_per_outbound_click' + COST_PER_RESULT = 
'cost_per_result' COST_PER_THRUPLAY = 'cost_per_thruplay' COST_PER_UNIQUE_ACTION_TYPE = 'cost_per_unique_action_type' COST_PER_UNIQUE_CLICK = 'cost_per_unique_click' @@ -241,31 +273,67 @@ class SourceFacebookMarketingValidEnums(str, Enum): INSTANT_EXPERIENCE_OUTBOUND_CLICKS = 'instant_experience_outbound_clicks' INTERACTIVE_COMPONENT_TAP = 'interactive_component_tap' LABELS = 'labels' + LANDING_PAGE_VIEW_ACTIONS_PER_LINK_CLICK = 'landing_page_view_actions_per_link_click' + LANDING_PAGE_VIEW_PER_LINK_CLICK = 'landing_page_view_per_link_click' + LANDING_PAGE_VIEW_PER_PURCHASE_RATE = 'landing_page_view_per_purchase_rate' + LINK_CLICKS_PER_RESULTS = 'link_clicks_per_results' LOCATION = 'location' + MARKETING_MESSAGES_CLICK_RATE_BENCHMARK = 'marketing_messages_click_rate_benchmark' MARKETING_MESSAGES_COST_PER_DELIVERED = 'marketing_messages_cost_per_delivered' MARKETING_MESSAGES_COST_PER_LINK_BTN_CLICK = 'marketing_messages_cost_per_link_btn_click' + MARKETING_MESSAGES_DELIVERED = 'marketing_messages_delivered' MARKETING_MESSAGES_DELIVERY_RATE = 'marketing_messages_delivery_rate' + MARKETING_MESSAGES_LINK_BTN_CLICK = 'marketing_messages_link_btn_click' MARKETING_MESSAGES_LINK_BTN_CLICK_RATE = 'marketing_messages_link_btn_click_rate' MARKETING_MESSAGES_MEDIA_VIEW_RATE = 'marketing_messages_media_view_rate' MARKETING_MESSAGES_PHONE_CALL_BTN_CLICK_RATE = 'marketing_messages_phone_call_btn_click_rate' + MARKETING_MESSAGES_QUICK_REPLY_BTN_CLICK = 'marketing_messages_quick_reply_btn_click' MARKETING_MESSAGES_QUICK_REPLY_BTN_CLICK_RATE = 'marketing_messages_quick_reply_btn_click_rate' + MARKETING_MESSAGES_READ = 'marketing_messages_read' MARKETING_MESSAGES_READ_RATE = 'marketing_messages_read_rate' + MARKETING_MESSAGES_READ_RATE_BENCHMARK = 'marketing_messages_read_rate_benchmark' + MARKETING_MESSAGES_SENT = 'marketing_messages_sent' MARKETING_MESSAGES_SPEND = 'marketing_messages_spend' + MARKETING_MESSAGES_SPEND_CURRENCY = 'marketing_messages_spend_currency' + 
MARKETING_MESSAGES_WEBSITE_ADD_TO_CART = 'marketing_messages_website_add_to_cart' + MARKETING_MESSAGES_WEBSITE_INITIATE_CHECKOUT = 'marketing_messages_website_initiate_checkout' + MARKETING_MESSAGES_WEBSITE_PURCHASE = 'marketing_messages_website_purchase' MARKETING_MESSAGES_WEBSITE_PURCHASE_VALUES = 'marketing_messages_website_purchase_values' MOBILE_APP_PURCHASE_ROAS = 'mobile_app_purchase_roas' OBJECTIVE = 'objective' + OBJECTIVE_RESULT_RATE = 'objective_result_rate' + OBJECTIVE_RESULTS = 'objective_results' ONSITE_CONVERSION_MESSAGING_DETECTED_PURCHASE_DEDUPED = 'onsite_conversion_messaging_detected_purchase_deduped' OPTIMIZATION_GOAL = 'optimization_goal' OUTBOUND_CLICKS = 'outbound_clicks' OUTBOUND_CLICKS_CTR = 'outbound_clicks_ctr' PLACE_PAGE_NAME = 'place_page_name' + PRODUCT_BRAND = 'product_brand' + PRODUCT_CATEGORY = 'product_category' + PRODUCT_CONTENT_ID = 'product_content_id' + PRODUCT_CUSTOM_LABEL_0 = 'product_custom_label_0' + PRODUCT_CUSTOM_LABEL_1 = 'product_custom_label_1' + PRODUCT_CUSTOM_LABEL_2 = 'product_custom_label_2' + PRODUCT_CUSTOM_LABEL_3 = 'product_custom_label_3' + PRODUCT_CUSTOM_LABEL_4 = 'product_custom_label_4' + PRODUCT_GROUP_CONTENT_ID = 'product_group_content_id' + PRODUCT_GROUP_RETAILER_ID = 'product_group_retailer_id' + PRODUCT_NAME = 'product_name' + PRODUCT_RETAILER_ID = 'product_retailer_id' + PRODUCT_VIEWS = 'product_views' + PURCHASE_PER_LANDING_PAGE_VIEW = 'purchase_per_landing_page_view' PURCHASE_ROAS = 'purchase_roas' + PURCHASES_PER_LINK_CLICK = 'purchases_per_link_click' QUALIFYING_QUESTION_QUALIFY_ANSWER_RATE = 'qualifying_question_qualify_answer_rate' QUALITY_RANKING = 'quality_ranking' REACH = 'reach' + RESULT_RATE = 'result_rate' + RESULT_VALUES_PERFORMANCE_INDICATOR = 'result_values_performance_indicator' + RESULTS = 'results' SHOPS_ASSISTED_PURCHASES = 'shops_assisted_purchases' SOCIAL_SPEND = 'social_spend' SPEND = 'spend' + TOTAL_CARD_VIEW = 'total_card_view' TOTAL_POSTBACKS = 'total_postbacks' 
TOTAL_POSTBACKS_DETAILED = 'total_postbacks_detailed' TOTAL_POSTBACKS_DETAILED_V4 = 'total_postbacks_detailed_v4' @@ -297,6 +365,7 @@ class SourceFacebookMarketingValidEnums(str, Enum): VIDEO_PLAY_RETENTION_GRAPH_ACTIONS = 'video_play_retention_graph_actions' VIDEO_THRUPLAY_WATCHED_ACTIONS = 'video_thruplay_watched_actions' VIDEO_TIME_WATCHED_ACTIONS = 'video_time_watched_actions' + VIDEO_VIEW_PER_IMPRESSION = 'video_view_per_impression' WEBSITE_CTR = 'website_ctr' WEBSITE_PURCHASE_ROAS = 'website_purchase_roas' WISH_BID = 'wish_bid' @@ -316,7 +385,7 @@ class InsightConfig: r"""Config for custom insights""" name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }}) r"""The name value of insight""" - action_breakdowns: Optional[List[ValidActionBreakdowns]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('action_breakdowns'), 'exclude': lambda f: f is None }}) + action_breakdowns: Optional[List[SourceFacebookMarketingValidActionBreakdowns]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('action_breakdowns'), 'exclude': lambda f: f is None }}) r"""A list of chosen action_breakdowns for action_breakdowns""" breakdowns: Optional[List[ValidBreakdowns]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('breakdowns'), 'exclude': lambda f: f is None }}) r"""A list of chosen breakdowns for breakdowns""" @@ -338,6 +407,25 @@ class InsightConfig: +class ValidActionBreakdowns(str, Enum): + r"""An enumeration.""" + ACTION_CANVAS_COMPONENT_NAME = 'action_canvas_component_name' + ACTION_CAROUSEL_CARD_ID = 'action_carousel_card_id' + ACTION_CAROUSEL_CARD_NAME = 'action_carousel_card_name' + ACTION_DESTINATION = 'action_destination' + ACTION_DEVICE = 'action_device' + ACTION_REACTION = 'action_reaction' + ACTION_TARGET_ID = 'action_target_id' + ACTION_TYPE = 'action_type' + 
ACTION_VIDEO_SOUND = 'action_video_sound' + ACTION_VIDEO_TYPE = 'action_video_type' + CONVERSION_DESTINATION = 'conversion_destination' + MATCHED_PERSONA_ID = 'matched_persona_id' + MATCHED_PERSONA_NAME = 'matched_persona_name' + SIGNAL_SOURCE_BUCKET = 'signal_source_bucket' + STANDARD_EVENT_CONTENT_TYPE = 'standard_event_content_type' + + class SourceFacebookMarketingFacebookMarketing(str, Enum): FACEBOOK_MARKETING = 'facebook-marketing' @@ -359,6 +447,8 @@ class SourceFacebookMarketing: r"""Select the statuses you want to be loaded in the stream. If no specific statuses are selected, the API's default behavior applies, and some statuses may be filtered out.""" custom_insights: Optional[List[InsightConfig]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('custom_insights'), 'exclude': lambda f: f is None }}) r"""A list which contains ad statistics entries, each entry must have a name and can contains fields, breakdowns or action_breakdowns. Click on \\"add\\" to fill this field.""" + default_ads_insights_action_breakdowns: Optional[List[ValidActionBreakdowns]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('default_ads_insights_action_breakdowns'), 'exclude': lambda f: f is None }}) + r"""Action breakdowns for the Built-in Ads Insights stream that will be used in the request. You can override default values or remove them to make it empty if needed.""" end_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) r"""The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. 
Not setting this option will result in always syncing the latest data.""" fetch_thumbnail_images: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('fetch_thumbnail_images'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_freshcaller.py b/src/airbyte_api/models/source_freshcaller.py index af656c15..c24c2af8 100644 --- a/src/airbyte_api/models/source_freshcaller.py +++ b/src/airbyte_api/models/source_freshcaller.py @@ -18,7 +18,7 @@ class Freshcaller(str, Enum): @dataclasses.dataclass class SourceFreshcaller: api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) - r"""Freshcaller API Key. See the docs for more information on how to obtain this key.""" + r"""Freshcaller API Key. See the docs for more information on how to obtain this key.""" domain: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('domain') }}) r"""Used to construct Base URL for the Freshcaller APIs""" requests_per_minute: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('requests_per_minute'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_freshdesk.py b/src/airbyte_api/models/source_freshdesk.py index 63371636..62d4eb3e 100644 --- a/src/airbyte_api/models/source_freshdesk.py +++ b/src/airbyte_api/models/source_freshdesk.py @@ -7,7 +7,101 @@ from dataclasses_json import Undefined, dataclass_json from datetime import datetime from enum import Enum -from typing import Final, Optional +from typing import Final, Optional, Union + + +class SourceFreshdeskSchemasRateLimitPlanRateLimitPlanPlan(str, Enum): + CUSTOM = 'custom' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class CustomPlan: + contacts_rate_limit: Optional[int] = dataclasses.field(default=None, 
metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('contacts_rate_limit'), 'exclude': lambda f: f is None }}) + r"""Maximum Rate in Limit/minute for contacts list endpoint in Custom Plan""" + general_rate_limit: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('general_rate_limit'), 'exclude': lambda f: f is None }}) + r"""General Maximum Rate in Limit/minute for other endpoints in Custom Plan""" + PLAN_TYPE: Final[Optional[SourceFreshdeskSchemasRateLimitPlanRateLimitPlanPlan]] = dataclasses.field(default=SourceFreshdeskSchemasRateLimitPlanRateLimitPlanPlan.CUSTOM, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('plan_type'), 'exclude': lambda f: f is None }}) + tickets_rate_limit: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tickets_rate_limit'), 'exclude': lambda f: f is None }}) + r"""Maximum Rate in Limit/minute for tickets list endpoint in Custom Plan""" + + + + +class SourceFreshdeskSchemasRateLimitPlanPlan(str, Enum): + ENTERPRISE = 'enterprise' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class EnterprisePlan: + UNSET='__SPEAKEASY_UNSET__' + CONTACTS_RATE_LIMIT: Final[Optional[int]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('contacts_rate_limit'), 'exclude': lambda f: f is EnterprisePlan.UNSET }}) + r"""Maximum Rate in Limit/minute for contacts list endpoint in Enterprise Plan""" + GENERAL_RATE_LIMIT: Final[Optional[int]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('general_rate_limit'), 'exclude': lambda f: f is EnterprisePlan.UNSET }}) + r"""General Maximum Rate in Limit/minute for other endpoints in Enterprise Plan""" + PLAN_TYPE: Final[Optional[SourceFreshdeskSchemasRateLimitPlanPlan]] = 
dataclasses.field(default=SourceFreshdeskSchemasRateLimitPlanPlan.ENTERPRISE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('plan_type'), 'exclude': lambda f: f is None }}) + TICKETS_RATE_LIMIT: Final[Optional[int]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tickets_rate_limit'), 'exclude': lambda f: f is EnterprisePlan.UNSET }}) + r"""Maximum Rate in Limit/minute for tickets list endpoint in Enterprise Plan""" + + + + +class SourceFreshdeskSchemasPlan(str, Enum): + PRO = 'pro' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class ProPlan: + UNSET='__SPEAKEASY_UNSET__' + CONTACTS_RATE_LIMIT: Final[Optional[int]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('contacts_rate_limit'), 'exclude': lambda f: f is ProPlan.UNSET }}) + r"""Maximum Rate in Limit/minute for contacts list endpoint in Pro Plan""" + GENERAL_RATE_LIMIT: Final[Optional[int]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('general_rate_limit'), 'exclude': lambda f: f is ProPlan.UNSET }}) + r"""General Maximum Rate in Limit/minute for other endpoints in Pro Plan""" + PLAN_TYPE: Final[Optional[SourceFreshdeskSchemasPlan]] = dataclasses.field(default=SourceFreshdeskSchemasPlan.PRO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('plan_type'), 'exclude': lambda f: f is None }}) + TICKETS_RATE_LIMIT: Final[Optional[int]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tickets_rate_limit'), 'exclude': lambda f: f is ProPlan.UNSET }}) + r"""Maximum Rate in Limit/minute for tickets list endpoint in Pro Plan""" + + + + +class SourceFreshdeskPlan(str, Enum): + GROWTH = 'growth' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class GrowthPlan: + UNSET='__SPEAKEASY_UNSET__' + 
CONTACTS_RATE_LIMIT: Final[Optional[int]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('contacts_rate_limit'), 'exclude': lambda f: f is GrowthPlan.UNSET }}) + r"""Maximum Rate in Limit/minute for contacts list endpoint in Growth Plan""" + GENERAL_RATE_LIMIT: Final[Optional[int]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('general_rate_limit'), 'exclude': lambda f: f is GrowthPlan.UNSET }}) + r"""General Maximum Rate in Limit/minute for other endpoints in Growth Plan""" + PLAN_TYPE: Final[Optional[SourceFreshdeskPlan]] = dataclasses.field(default=SourceFreshdeskPlan.GROWTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('plan_type'), 'exclude': lambda f: f is None }}) + TICKETS_RATE_LIMIT: Final[Optional[int]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tickets_rate_limit'), 'exclude': lambda f: f is GrowthPlan.UNSET }}) + r"""Maximum Rate in Limit/minute for tickets list endpoint in Growth Plan""" + + + + +class Plan(str, Enum): + FREE = 'free' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class FreePlan: + UNSET='__SPEAKEASY_UNSET__' + CONTACTS_RATE_LIMIT: Final[Optional[int]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('contacts_rate_limit'), 'exclude': lambda f: f is FreePlan.UNSET }}) + r"""Maximum Rate in Limit/minute for contacts list endpoint in Free Plan""" + GENERAL_RATE_LIMIT: Final[Optional[int]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('general_rate_limit'), 'exclude': lambda f: f is FreePlan.UNSET }}) + r"""General Maximum Rate in Limit/minute for other endpoints in Free Plan""" + PLAN_TYPE: Final[Optional[Plan]] = dataclasses.field(default=Plan.FREE, metadata={'dataclasses_json': { 'letter_case': 
utils.get_field_name('plan_type'), 'exclude': lambda f: f is None }}) + TICKETS_RATE_LIMIT: Final[Optional[int]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tickets_rate_limit'), 'exclude': lambda f: f is FreePlan.UNSET }}) + r"""Maximum Rate in Limit/minute for tickets list endpoint in Free Plan""" + + class Freshdesk(str, Enum): @@ -23,6 +117,8 @@ class SourceFreshdesk: r"""Freshdesk domain""" lookback_window_in_days: Optional[int] = dataclasses.field(default=14, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_window_in_days'), 'exclude': lambda f: f is None }}) r"""Number of days for lookback window for the stream Satisfaction Ratings""" + rate_limit_plan: Optional[RateLimitPlan] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('rate_limit_plan'), 'exclude': lambda f: f is None }}) + r"""Rate Limit Plan for API Budget""" requests_per_minute: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('requests_per_minute'), 'exclude': lambda f: f is None }}) r"""The number of requests per minute that this source allowed to use. There is a rate limit of 50 requests per minute per app per account.""" SOURCE_TYPE: Final[Freshdesk] = dataclasses.field(default=Freshdesk.FRESHDESK, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) @@ -30,3 +126,5 @@ class SourceFreshdesk: r"""UTC date and time. Any data created after this date will be replicated. 
If this parameter is not set, all data will be replicated.""" + +RateLimitPlan = Union[FreePlan, GrowthPlan, ProPlan, EnterprisePlan, CustomPlan] diff --git a/src/airbyte_api/models/source_gcs.py b/src/airbyte_api/models/source_gcs.py index ccccf34a..d169fb13 100644 --- a/src/airbyte_api/models/source_gcs.py +++ b/src/airbyte_api/models/source_gcs.py @@ -54,7 +54,7 @@ class SourceGcsSchemasStreamsFormatFormat6Filetype(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class ExcelFormat: +class SourceGcsExcelFormat: FILETYPE: Final[Optional[SourceGcsSchemasStreamsFormatFormat6Filetype]] = dataclasses.field(default=SourceGcsSchemasStreamsFormatFormat6Filetype.EXCEL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) @@ -301,4 +301,4 @@ class SourceGcs: SourceGcsCSVHeaderDefinition = Union[SourceGcsFromCSV, SourceGcsAutogenerated, SourceGcsUserProvided] -SourceGcsFormat = Union[SourceGcsAvroFormat, SourceGcsCSVFormat, SourceGcsJsonlFormat, SourceGcsParquetFormat, SourceGcsUnstructuredDocumentFormat, ExcelFormat] +SourceGcsFormat = Union[SourceGcsAvroFormat, SourceGcsCSVFormat, SourceGcsJsonlFormat, SourceGcsParquetFormat, SourceGcsUnstructuredDocumentFormat, SourceGcsExcelFormat] diff --git a/src/airbyte_api/models/source_mantle.py b/src/airbyte_api/models/source_mantle.py new file mode 100644 index 00000000..c6c54858 --- /dev/null +++ b/src/airbyte_api/models/source_mantle.py @@ -0,0 +1,24 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Mantle(str, Enum): + MANTLE = 'mantle' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceMantle: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Mantle] = dataclasses.field(default=Mantle.MANTLE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_metricool.py b/src/airbyte_api/models/source_metricool.py new file mode 100644 index 00000000..72fbc40e --- /dev/null +++ b/src/airbyte_api/models/source_metricool.py @@ -0,0 +1,32 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Any, Final, List, Optional + + +class Metricool(str, Enum): + METRICOOL = 'metricool' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceMetricool: + blog_ids: List[Any] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('blog_ids') }}) + r"""Brand IDs""" + user_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('user_id') }}) + r"""Account ID""" + user_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('user_token') }}) + r"""User token to authenticate API requests. Find it in the Account Settings menu, API section of your Metricool account.""" + end_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) + r"""If not set, defaults to current datetime.""" + SOURCE_TYPE: Final[Metricool] = dataclasses.field(default=Metricool.METRICOOL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) + r"""If not set, defaults to 60 days back. 
If below \\"End Date\\", defaults to 1 day before \\"End Date\\" """ + + diff --git a/src/airbyte_api/models/source_mixpanel.py b/src/airbyte_api/models/source_mixpanel.py index 9f114256..e3f341d4 100644 --- a/src/airbyte_api/models/source_mixpanel.py +++ b/src/airbyte_api/models/source_mixpanel.py @@ -65,6 +65,8 @@ class SourceMixpanel: r"""The date in the format YYYY-MM-DD. Any data after this date will not be replicated. Left empty to always sync to most recent date""" export_lookback_window: Optional[int] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('export_lookback_window'), 'exclude': lambda f: f is None }}) r"""The number of seconds to look back from the last synced timestamp during incremental syncs of the Export stream. This ensures no data is missed due to delays in event recording. Default is 0 seconds. Must be a non-negative integer.""" + num_workers: Optional[int] = dataclasses.field(default=3, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('num_workers'), 'exclude': lambda f: f is None }}) + r"""The number of worker threads to use for the sync. The performance upper boundary is based on the limit of your Mixpanel pricing plan. More info about the rate limit tiers can be found on Mixpanel's API docs.""" page_size: Optional[int] = dataclasses.field(default=1000, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('page_size'), 'exclude': lambda f: f is None }}) r"""The number of records to fetch per request for the engage stream. Default is 1000. 
If you are experiencing long sync times with this stream, try increasing this value.""" project_timezone: Optional[str] = dataclasses.field(default='US/Pacific', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('project_timezone'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_mssql.py b/src/airbyte_api/models/source_mssql.py index a62faea4..4108be4d 100644 --- a/src/airbyte_api/models/source_mssql.py +++ b/src/airbyte_api/models/source_mssql.py @@ -14,7 +14,7 @@ class SourceMssqlSchemasMethod(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class ScanChangesWithUserDefinedCursor: +class SourceMssqlScanChangesWithUserDefinedCursor: r"""Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).""" exclude_todays_data: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('exclude_todays_data'), 'exclude': lambda f: f is None }}) r"""When enabled incremental syncs using a cursor of a temporal types (date or datetime) will include cursor values only up until last midnight (Advanced)""" @@ -35,7 +35,7 @@ class SourceMssqlMethod(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class ReadChangesUsingChangeDataCaptureCDC: +class SourceMssqlReadChangesUsingChangeDataCaptureCDC: r"""Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. 
This must be enabled on your database.""" initial_load_timeout_hours: Optional[int] = dataclasses.field(default=8, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('initial_load_timeout_hours'), 'exclude': lambda f: f is None }}) r"""The amount of time an initial load is allowed to continue for before catching up on CDC logs.""" @@ -170,7 +170,7 @@ class SourceMssql: r"""The username which is used to access the database.""" jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }}) r"""Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).""" - replication_method: Optional[UpdateMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('replication_method'), 'exclude': lambda f: f is None }}) + replication_method: Optional[SourceMssqlUpdateMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('replication_method'), 'exclude': lambda f: f is None }}) r"""Configures how data is extracted from the database.""" schemas: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schemas'), 'exclude': lambda f: f is None }}) r"""The list of schemas to sync from. Defaults to user. 
Case sensitive.""" @@ -182,7 +182,7 @@ class SourceMssql: -UpdateMethod = Union[ReadChangesUsingChangeDataCaptureCDC, ScanChangesWithUserDefinedCursor] +SourceMssqlUpdateMethod = Union[SourceMssqlReadChangesUsingChangeDataCaptureCDC, SourceMssqlScanChangesWithUserDefinedCursor] SourceMssqlSSLMethod = Union[SourceMssqlUnencrypted, SourceMssqlEncryptedTrustServerCertificate, SourceMssqlEncryptedVerifyCertificate] diff --git a/src/airbyte_api/models/source_netsuite_enterprise.py b/src/airbyte_api/models/source_netsuite_enterprise.py index e577c6b3..dd03392d 100644 --- a/src/airbyte_api/models/source_netsuite_enterprise.py +++ b/src/airbyte_api/models/source_netsuite_enterprise.py @@ -66,7 +66,7 @@ class SourceNetsuiteEnterprisePasswordAuthentication: -class CursorMethod(str, Enum): +class SourceNetsuiteEnterpriseCursorMethod(str, Enum): USER_DEFINED = 'user_defined' @@ -75,7 +75,7 @@ class CursorMethod(str, Enum): class SourceNetsuiteEnterpriseScanChangesWithUserDefinedCursor: r"""Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. 
created_at, updated_at).""" additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) - cursor_method: Optional[CursorMethod] = dataclasses.field(default=CursorMethod.USER_DEFINED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cursor_method'), 'exclude': lambda f: f is None }}) + cursor_method: Optional[SourceNetsuiteEnterpriseCursorMethod] = dataclasses.field(default=SourceNetsuiteEnterpriseCursorMethod.USER_DEFINED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cursor_method'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_oracle_enterprise.py b/src/airbyte_api/models/source_oracle_enterprise.py index 61c48019..c1b1ed28 100644 --- a/src/airbyte_api/models/source_oracle_enterprise.py +++ b/src/airbyte_api/models/source_oracle_enterprise.py @@ -138,6 +138,19 @@ class OracleEnterprise(str, Enum): ORACLE_ENTERPRISE = 'oracle-enterprise' +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class TableFilter: + r"""Inclusion filter configuration for table selection per schema.""" + schema_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schema_name') }}) + r"""The name of the schema to apply this filter to. Should match a schema defined in \\"Schemas\\" field above.""" + table_name_patterns: List[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('table_name_patterns') }}) + r"""List of table name patterns to include from this schema. 
Should be a SQL LIKE pattern.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + + + + class SourceOracleEnterpriseSchemasTunnelMethodTunnelMethod(str, Enum): SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH' @@ -230,6 +243,8 @@ class SourceOracleEnterprise: schemas: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schemas'), 'exclude': lambda f: f is None }}) r"""The list of schemas to sync from. Defaults to user. Case sensitive.""" SOURCE_TYPE: Final[OracleEnterprise] = dataclasses.field(default=OracleEnterprise.ORACLE_ENTERPRISE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + table_filters: Optional[List[TableFilter]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('table_filters'), 'exclude': lambda f: f is None }}) + r"""Inclusion filters for table selection per schema. If no filters are specified for a schema, all tables in that schema will be synced.""" diff --git a/src/airbyte_api/models/source_outlook.py b/src/airbyte_api/models/source_outlook.py new file mode 100644 index 00000000..c6fef9de --- /dev/null +++ b/src/airbyte_api/models/source_outlook.py @@ -0,0 +1,28 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional + + +class Outlook(str, Enum): + OUTLOOK = 'outlook' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceOutlook: + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + r"""The Client ID of your Microsoft Azure application""" + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + r"""The Client Secret of your Microsoft Azure application""" + refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token') }}) + r"""Refresh token obtained from Microsoft OAuth flow""" + SOURCE_TYPE: Final[Outlook] = dataclasses.field(default=Outlook.OUTLOOK, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + tenant_id: Optional[str] = dataclasses.field(default='common', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tenant_id'), 'exclude': lambda f: f is None }}) + r"""Azure AD Tenant ID (optional for multi-tenant apps, defaults to 'common')""" + + diff --git a/src/airbyte_api/models/source_pardot.py b/src/airbyte_api/models/source_pardot.py index a45dbde1..1153cd7b 100644 --- a/src/airbyte_api/models/source_pardot.py +++ b/src/airbyte_api/models/source_pardot.py @@ -27,8 +27,6 @@ class SourcePardot: r"""Salesforce Refresh Token used for Airbyte to access your Salesforce account. 
If you don't know what this is, follow this guide to retrieve it.""" is_sandbox: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('is_sandbox'), 'exclude': lambda f: f is None }}) r"""Whether or not the the app is in a Salesforce sandbox. If you do not know what this, assume it is false.""" - page_size: Optional[str] = dataclasses.field(default='1000', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('page_size'), 'exclude': lambda f: f is None }}) - r"""The maximum number of records to return per request""" SOURCE_TYPE: Final[Pardot] = dataclasses.field(default=Pardot.PARDOT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) start_date: Optional[datetime] = dataclasses.field(default=dateutil.parser.isoparse('2007-01-01T00:00:00Z'), metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) r"""UTC date and time in the format 2000-01-01T00:00:00Z. Any data before this date will not be replicated. 
Defaults to the year Pardot was released.""" diff --git a/src/airbyte_api/models/source_pinterest.py b/src/airbyte_api/models/source_pinterest.py index 51ccd4b3..68f76d09 100644 --- a/src/airbyte_api/models/source_pinterest.py +++ b/src/airbyte_api/models/source_pinterest.py @@ -257,6 +257,8 @@ class SourcePinterest: credentials: Optional[OAuth20] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) custom_reports: Optional[List[ReportConfig]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('custom_reports'), 'exclude': lambda f: f is None }}) r"""A list which contains ad statistics entries, each entry must have a name and can contains fields, breakdowns or action_breakdowns. Click on \\"add\\" to fill this field.""" + num_threads: Optional[int] = dataclasses.field(default=2, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('num_threads'), 'exclude': lambda f: f is None }}) + r"""The number of parallel threads to use for the sync.""" SOURCE_TYPE: Final[Optional[SourcePinterestPinterest]] = dataclasses.field(default=SourcePinterestPinterest.PINTEREST, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType'), 'exclude': lambda f: f is None }}) start_date: Optional[date] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(True), 'decoder': utils.datefromisoformat, 'exclude': lambda f: f is None }}) r"""A date in the format YYYY-MM-DD. 
If you have not set a date, it would be defaulted to latest allowed date by api (89 days from today).""" diff --git a/src/airbyte_api/models/source_postgres.py b/src/airbyte_api/models/source_postgres.py index 10f3fea4..d2dad0c2 100644 --- a/src/airbyte_api/models/source_postgres.py +++ b/src/airbyte_api/models/source_postgres.py @@ -254,6 +254,12 @@ class SourcePostgres: r"""Hostname of the database.""" username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) r"""Username to access the database.""" + entra_client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('entra_client_id'), 'exclude': lambda f: f is None }}) + r"""If using Entra service principal, the application ID of the service principal""" + entra_service_principal_auth: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('entra_service_principal_auth'), 'exclude': lambda f: f is None }}) + r"""Interpret password as a client secret for a Microsft Entra service principal""" + entra_tenant_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('entra_tenant_id'), 'exclude': lambda f: f is None }}) + r"""If using Entra service principal, the ID of the tenant""" jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }}) r"""Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). 
For more information read about JDBC URL parameters.""" password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_sap_hana_enterprise.py b/src/airbyte_api/models/source_sap_hana_enterprise.py index 1277f63b..3739ecb2 100644 --- a/src/airbyte_api/models/source_sap_hana_enterprise.py +++ b/src/airbyte_api/models/source_sap_hana_enterprise.py @@ -99,6 +99,19 @@ class SourceSapHanaEnterpriseUnencrypted: +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSapHanaEnterpriseTableFilter: + r"""Inclusion filter configuration for table selection per schema.""" + schema_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schema_name') }}) + r"""The name of the schema to apply this filter to. Should match a schema defined in \\"Schemas\\" field above.""" + table_name_patterns: List[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('table_name_patterns') }}) + r"""List of table name patterns to include from this schema. 
Each filter should be a SQL LIKE pattern.""" + additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) + + + + class SapHanaEnterprise(str, Enum): SAP_HANA_ENTERPRISE = 'sap-hana-enterprise' @@ -180,14 +193,18 @@ class SourceSapHanaEnterprise: r"""How often (in seconds) a stream should checkpoint, when possible.""" concurrency: Optional[int] = dataclasses.field(default=1, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('concurrency'), 'exclude': lambda f: f is None }}) r"""Maximum number of concurrent queries to the database.""" + database: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database'), 'exclude': lambda f: f is None }}) + r"""The name of the tenant database to connect to. This is required for multi-tenant SAP HANA systems. For single-tenant systems, this can be left empty.""" + filters: Optional[List[SourceSapHanaEnterpriseTableFilter]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filters'), 'exclude': lambda f: f is None }}) + r"""Inclusion filters for table selection per schema. If no filters are specified for a schema, all tables in that schema will be synced.""" jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }}) r"""Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3).""" password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) r"""The password associated with the username.""" port: Optional[int] = dataclasses.field(default=443, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port'), 'exclude': lambda f: f is None }}) r"""Port of the database. - SapHana Corporations recommends the following port numbers: - 443 - Default listening port for SAP HANA cloud client connections to the listener. + SAP recommends the following port numbers: + 443 - Default listening port for SAP HANA Cloud client connections to the listener. """ schemas: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schemas'), 'exclude': lambda f: f is None }}) r"""The list of schemas to sync from. Defaults to user. Case sensitive.""" diff --git a/src/airbyte_api/models/source_sharepoint_enterprise.py b/src/airbyte_api/models/source_sharepoint_enterprise.py index 73d3e443..b2536885 100644 --- a/src/airbyte_api/models/source_sharepoint_enterprise.py +++ b/src/airbyte_api/models/source_sharepoint_enterprise.py @@ -52,6 +52,8 @@ class SourceSharepointEnterpriseAuthenticateViaMicrosoftOAuth: AUTH_TYPE: Final[Optional[SourceSharepointEnterpriseAuthType]] = dataclasses.field(default=SourceSharepointEnterpriseAuthType.CLIENT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type'), 'exclude': lambda f: f is None }}) refresh_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('refresh_token'), 'exclude': lambda f: f is None }}) r"""Refresh Token of your Microsoft developer application""" + scopes: Optional[str] = dataclasses.field(default='offline_access Files.Read.All Sites.Read.All Sites.Selected User.Read.All Group.Read.All 
Application.Read.All Device.Read.All', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('scopes'), 'exclude': lambda f: f is None }}) + r"""Scopes to request when authorizing. If you want to change scopes after source was created, you need to Re-authenticate to actually apply this change to your access token.""" @@ -319,6 +321,8 @@ class SourceSharepointEnterprise: streams: List[SourceSharepointEnterpriseFileBasedStreamConfig] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('streams') }}) r"""Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.""" delivery_method: Optional[SourceSharepointEnterpriseDeliveryMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delivery_method'), 'exclude': lambda f: f is None }}) + file_contains_query: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('file_contains_query'), 'exclude': lambda f: f is None }}) + r"""Input additional query to search files. It will make search files step faster if your Sharepoint account has a lot of files and folders. This query text will be used in the request that will look for files which properties contains inserted text. You can use multiple query texts, they will be applied in search request one by one.""" folder_path: Optional[str] = dataclasses.field(default='.', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('folder_path'), 'exclude': lambda f: f is None }}) r"""Path to a specific folder within the drives to search for files. Leave empty to search all folders of the drives. 
This does not apply to shared items.""" search_scope: Optional[SourceSharepointEnterpriseSearchScope] = dataclasses.field(default=SourceSharepointEnterpriseSearchScope.ALL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('search_scope'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_slack.py b/src/airbyte_api/models/source_slack.py index b95d2ec3..eed1ecc1 100644 --- a/src/airbyte_api/models/source_slack.py +++ b/src/airbyte_api/models/source_slack.py @@ -60,7 +60,7 @@ class SourceSlack: include_private_channels: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('include_private_channels'), 'exclude': lambda f: f is None }}) r"""Whether to read information from private channels that the bot is already in. If false, only public channels will be read. If true, the bot must be manually added to private channels.""" join_channels: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('join_channels'), 'exclude': lambda f: f is None }}) - r"""Whether to join all channels or to sync data only from channels the bot is already in. If false, you'll need to manually add the bot to all the channels from which you'd like to sync messages.""" + r"""Whether to join all channels or to sync data only from channels the bot is already in. 
If false, you'll need to manually add the bot to all the channels from which you'd like to sync messages.""" lookback_window: Optional[int] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_window'), 'exclude': lambda f: f is None }}) r"""How far into the past to look for messages in threads, default is 0 days""" num_workers: Optional[int] = dataclasses.field(default=2, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('num_workers'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_ticktick.py b/src/airbyte_api/models/source_ticktick.py new file mode 100644 index 00000000..62c9e5c6 --- /dev/null +++ b/src/airbyte_api/models/source_ticktick.py @@ -0,0 +1,55 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final, Optional, Union + + +class SourceTicktickSchemasAuthType(str, Enum): + TOKEN = 'Token' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class BearerTokenFromOauth2: + bearer_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('bearer_token') }}) + r"""Access token for making authenticated requests; filled after complete oauth2 flow.""" + AUTH_TYPE: Final[SourceTicktickSchemasAuthType] = dataclasses.field(default=SourceTicktickSchemasAuthType.TOKEN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }}) + + + + +class SourceTicktickAuthType(str, Enum): + OAUTH = 'Oauth' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class OAuth2: + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + r"""The client ID of your Ticktick application. 
Read more here.""" + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + r"""The client secret of your Ticktick application. Read more here.""" + AUTH_TYPE: Final[SourceTicktickAuthType] = dataclasses.field(default=SourceTicktickAuthType.OAUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }}) + client_access_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_access_token'), 'exclude': lambda f: f is None }}) + r"""Access token for making authenticated requests; filled after complete oauth2 flow.""" + + + + +class SourceTicktickTicktick(str, Enum): + TICKTICK = 'ticktick' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceTicktick: + authorization: Optional[SourceTicktickAuthenticationType] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('authorization'), 'exclude': lambda f: f is None }}) + SOURCE_TYPE: Final[Optional[SourceTicktickTicktick]] = dataclasses.field(default=SourceTicktickTicktick.TICKTICK, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType'), 'exclude': lambda f: f is None }}) + + + +SourceTicktickAuthenticationType = Union[OAuth2, BearerTokenFromOauth2] diff --git a/src/airbyte_api/models/source_twilio.py b/src/airbyte_api/models/source_twilio.py index 5a29185f..137b5857 100644 --- a/src/airbyte_api/models/source_twilio.py +++ b/src/airbyte_api/models/source_twilio.py @@ -25,6 +25,8 @@ class SourceTwilio: r"""UTC date and time in the format 2020-10-01T00:00:00Z. 
Any data before this date will not be replicated.""" lookback_window: Optional[int] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_window'), 'exclude': lambda f: f is None }}) r"""How far into the past to look for records. (in minutes)""" + num_worker: Optional[int] = dataclasses.field(default=3, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('num_worker'), 'exclude': lambda f: f is None }}) + r"""The number of worker threads to use for the sync.""" SOURCE_TYPE: Final[Twilio] = dataclasses.field(default=Twilio.TWILIO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_uptick.py b/src/airbyte_api/models/source_uptick.py index a49c46fb..9315db94 100644 --- a/src/airbyte_api/models/source_uptick.py +++ b/src/airbyte_api/models/source_uptick.py @@ -2,12 +2,10 @@ from __future__ import annotations import dataclasses -import dateutil.parser from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json -from datetime import datetime from enum import Enum -from typing import Final, Optional +from typing import Final class Uptick(str, Enum): @@ -18,18 +16,11 @@ class Uptick(str, Enum): @dataclasses.dataclass class SourceUptick: base_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('base_url') }}) - r"""Ex: https://demo-fire.onuptick.com/""" + r"""eg. 
https://demo-fire.onuptick.com (no trailing slash)""" client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) - client_refresh_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_refresh_token') }}) client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) - end_date: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'exclude': lambda f: f is None }}) - r"""Fetch data up until this date""" - oauth_access_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('oauth_access_token'), 'exclude': lambda f: f is None }}) - r"""The current access token. This field might be overridden by the connector based on the token refresh endpoint response.""" - oauth_token_expiry_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('oauth_token_expiry_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) - r"""The date the current access token expires in. 
This field might be overridden by the connector based on the token refresh endpoint response.""" + password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }}) + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) SOURCE_TYPE: Final[Uptick] = dataclasses.field(default=Uptick.UPTICK, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) - start_date: Optional[str] = dataclasses.field(default='2025-01-01', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'exclude': lambda f: f is None }}) - r"""Fetch data starting from this date (by default 2025-01-01)""" diff --git a/src/airbyte_api/models/source_workday.py b/src/airbyte_api/models/source_workday.py index c4822039..4093c20d 100644 --- a/src/airbyte_api/models/source_workday.py +++ b/src/airbyte_api/models/source_workday.py @@ -2,42 +2,26 @@ from __future__ import annotations import dataclasses -import dateutil.parser from airbyte_api import utils from dataclasses_json import Undefined, dataclass_json -from datetime import datetime from enum import Enum -from typing import Any, Final, List, Optional, Union - - -class Rest(str, Enum): - REST = 'REST' +from typing import Final, List, Optional @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class RESTAPIStreams: - access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) - r"""Follow the instructions in the \\"OAuth 2.0 in Postman - API Client for Integrations\\" article in the Workday community docs to obtain access token.""" - AUTH_TYPE: Final[Rest] = dataclasses.field(default=Rest.REST, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }}) - start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': 
utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) - r"""Rows after this date will be synced, default 2 years ago.""" +class SourceWorkdayAuthentication: + r"""Credentials for connecting to the Workday (RAAS) API.""" + password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }}) + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) -class Raas(str, Enum): - RAAS = 'RAAS' - - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class ReportBasedStreams: - password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }}) - report_ids: List[Any] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('report_ids') }}) - r"""Report IDs can be found by clicking the three dots on the right side of the report > Web Service > View URLs > in JSON url copy everything between Workday tenant/ and ?format=json.""" - username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) - AUTH_TYPE: Final[Raas] = dataclasses.field(default=Raas.RAAS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('auth_type') }}) +class ReportIds: + report_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('report_id'), 'exclude': lambda f: f is None }}) @@ -50,11 +34,13 @@ class Workday(str, Enum): @dataclasses.dataclass class SourceWorkday: credentials: SourceWorkdayAuthentication = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }}) - r"""Report Based Streams and REST API Streams use different methods of Authentication. 
Choose streams type you want to sync and provide needed credentials for them.""" + r"""Credentials for connecting to the Workday (RAAS) API.""" host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }}) + report_ids: List[ReportIds] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('report_ids') }}) + r"""Report IDs can be found by clicking the three dots on the right side of the report > Web Service > View URLs > in JSON url copy everything between Workday tenant/ and ?format=json.""" tenant_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tenant_id') }}) + num_workers: Optional[int] = dataclasses.field(default=10, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('num_workers'), 'exclude': lambda f: f is None }}) + r"""The number of worker threads to use for the sync.""" SOURCE_TYPE: Final[Workday] = dataclasses.field(default=Workday.WORKDAY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) - -SourceWorkdayAuthentication = Union[ReportBasedStreams, RESTAPIStreams] diff --git a/src/airbyte_api/models/source_workday_rest.py b/src/airbyte_api/models/source_workday_rest.py new file mode 100644 index 00000000..3706d9a3 --- /dev/null +++ b/src/airbyte_api/models/source_workday_rest.py @@ -0,0 +1,40 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceWorkdayRestAuthentication: + r"""Credentials for connecting to the Workday (REST) API.""" + access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) + r"""Follow the instructions in the \\"OAuth 2.0 in Postman - API Client for Integrations\\" article in the Workday community docs to obtain access token.""" + + + + +class WorkdayRest(str, Enum): + WORKDAY_REST = 'workday-rest' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceWorkdayRest: + credentials: SourceWorkdayRestAuthentication = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }}) + r"""Credentials for connecting to the Workday (REST) API.""" + host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }}) + tenant_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tenant_id') }}) + num_workers: Optional[int] = dataclasses.field(default=20, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('num_workers'), 'exclude': lambda f: f is None }}) + r"""The number of worker threads to use for the sync.""" + SOURCE_TYPE: Final[WorkdayRest] = dataclasses.field(default=WorkdayRest.WORKDAY_REST, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': 
dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) + r"""Rows after this date will be synced, default 2 years ago.""" + + diff --git a/src/airbyte_api/models/sourceconfiguration.py b/src/airbyte_api/models/sourceconfiguration.py index 70bca326..20fcef98 100644 --- a/src/airbyte_api/models/sourceconfiguration.py +++ b/src/airbyte_api/models/sourceconfiguration.py @@ -4,6 +4,8 @@ from .source_100ms import Source100ms from .source_7shifts import Source7shifts from .source_activecampaign import SourceActivecampaign +from .source_acuity_scheduling import SourceAcuityScheduling +from .source_adobe_commerce_magento import SourceAdobeCommerceMagento from .source_agilecrm import SourceAgilecrm from .source_aha import SourceAha from .source_airbyte import SourceAirbyte @@ -102,8 +104,11 @@ from .source_customer_io import SourceCustomerIo from .source_customerly import SourceCustomerly from .source_datadog import SourceDatadog +from .source_datagen import SourceDatagen from .source_datascope import SourceDatascope +from .source_db2_enterprise import SourceDb2Enterprise from .source_dbt import SourceDbt +from .source_defillama import SourceDefillama from .source_delighted import SourceDelighted from .source_deputy import SourceDeputy from .source_ding_connect import SourceDingConnect @@ -264,6 +269,7 @@ from .source_mailjet_sms import SourceMailjetSms from .source_mailosaur import SourceMailosaur from .source_mailtrap import SourceMailtrap +from .source_mantle import SourceMantle from .source_marketo import SourceMarketo from .source_marketstack import SourceMarketstack from .source_mendeley import SourceMendeley @@ -271,6 +277,7 @@ from .source_mercado_ads import SourceMercadoAds from .source_merge import SourceMerge from .source_metabase import SourceMetabase +from .source_metricool import SourceMetricool from .source_microsoft_dataverse import SourceMicrosoftDataverse from .source_microsoft_entra_id import SourceMicrosoftEntraID from .source_microsoft_lists import 
SourceMicrosoftLists @@ -324,6 +331,7 @@ from .source_orb import SourceOrb from .source_oura import SourceOura from .source_outbrain_amplify import SourceOutbrainAmplify +from .source_outlook import SourceOutlook from .source_outreach import SourceOutreach from .source_oveit import SourceOveit from .source_pabbly_subscriptions_billing import SourcePabblySubscriptionsBilling @@ -470,6 +478,7 @@ from .source_thrive_learning import SourceThriveLearning from .source_ticketmaster import SourceTicketmaster from .source_tickettailor import SourceTickettailor +from .source_ticktick import SourceTicktick from .source_tiktok_marketing import SourceTiktokMarketing from .source_timely import SourceTimely from .source_tinyemail import SourceTinyemail @@ -512,6 +521,7 @@ from .source_wordpress import SourceWordpress from .source_workable import SourceWorkable from .source_workday import SourceWorkday +from .source_workday_rest import SourceWorkdayRest from .source_workflowmax import SourceWorkflowmax from .source_workramp import SourceWorkramp from .source_wrike import SourceWrike @@ -548,4 +558,4 @@ from .source_zoom import SourceZoom from typing import Union -SourceConfiguration = Union[SourceAha, Source100ms, Source7shifts, SourceActivecampaign, SourceAgilecrm, SourceAirbyte, SourceAircall, SourceAirtable, SourceAkeneo, SourceAlgolia, SourceAlpacaBrokerAPI, SourceAlphaVantage, SourceAmazonAds, SourceAmazonSellerPartner, SourceAmazonSqs, SourceAmplitude, SourceApifyDataset, SourceAppcues, SourceAppfigures, SourceAppfollow, SourceAppleSearchAds, SourceAppsflyer, SourceApptivo, SourceAsana, SourceAshby, SourceAssemblyai, SourceAuth0, SourceAviationstack, SourceAwinAdvertiser, SourceAwsCloudtrail, SourceAzureBlobStorage, SourceAzureTable, SourceBabelforce, SourceBambooHr, SourceBasecamp, SourceBeamer, SourceBigmailer, SourceBigquery, SourceBingAds, SourceBitly, SourceBlogger, SourceBluetally, SourceBoldsign, SourceBox, SourceBraintree, SourceBraze, SourceBreezometer, 
SourceBreezyHr, SourceBrevo, SourceBrex, SourceBugsnag, SourceBuildkite, SourceBunnyInc, SourceBuzzsprout, SourceCalCom, SourceCalendly, SourceCallrail, SourceCampaignMonitor, SourceCampayn, SourceCanny, SourceCapsuleCrm, SourceCaptainData, SourceCareQualityCommission, SourceCart, SourceCastorEdc, SourceChameleon, SourceChargebee, SourceChargedesk, SourceChargify, SourceChartmogul, SourceChurnkey, SourceCimis, SourceCin7, SourceCirca, SourceCircleci, SourceCiscoMeraki, SourceClarifAi, SourceClazar, SourceClickhouse, SourceClickupAPI, SourceClockify, SourceClockodo, SourceCloseCom, SourceCloudbeds, SourceCoassemble, SourceCoda, SourceCodefresh, SourceCoinAPI, SourceCoingeckoCoins, SourceCoinmarketcap, SourceConcord, SourceConfigcat, SourceConfluence, SourceConvertkit, SourceConvex, SourceCopper, SourceCouchbase, SourceCountercyclical, SourceCustomerIo, SourceCustomerly, SourceDatadog, SourceDatascope, SourceDbt, SourceDelighted, SourceDeputy, SourceDingConnect, SourceDixa, SourceDockerhub, SourceDocuseal, SourceDolibarr, SourceDremio, SourceDrift, SourceDrip, SourceDropboxSign, SourceDwolla, SourceDynamodb, SourceEConomic, SourceEasypost, SourceEasypromos, SourceEbayFinance, SourceEbayFulfillment, SourceElasticemail, SourceElasticsearch, SourceEmailoctopus, SourceEmploymentHero, SourceEncharge, SourceEventbrite, SourceEventee, SourceEventzilla, SourceEverhour, SourceExchangeRates, SourceEzofficeinventory, SourceFacebookMarketing, SourceFacebookPages, SourceFactorial, SourceFaker, SourceFastbill, SourceFastly, SourceFauna, SourceFile, SourceFillout, SourceFinage, SourceFinancialModelling, SourceFinnhub, SourceFinnworlds, SourceFirebolt, SourceFirehydrant, SourceFleetio, SourceFlexmail, SourceFlexport, SourceFloat, SourceFlowlu, SourceFormbricks, SourceFreeAgentConnector, SourceFreightview, SourceFreshbooks, SourceFreshcaller, SourceFreshchat, SourceFreshdesk, SourceFreshsales, SourceFreshservice, SourceFront, SourceFulcrum, SourceFullstory, SourceGainsightPx, 
SourceGcs, SourceGetgist, SourceGetlago, SourceGiphy, SourceGitbook, SourceGithub, SourceGitlab, SourceGlassfrog, SourceGmail, SourceGnews, SourceGocardless, SourceGoldcast, SourceGologin, SourceGong, SourceGoogleAds, SourceGoogleAnalyticsDataAPI, SourceGoogleCalendar, SourceGoogleClassroom, SourceGoogleDirectory, SourceGoogleDrive, SourceGoogleForms, SourceGooglePagespeedInsights, SourceGoogleSearchConsole, SourceGoogleSheets, SourceGoogleTasks, SourceGoogleWebfonts, SourceGorgias, SourceGreenhouse, SourceGreythr, SourceGridly, SourceGuru, SourceGutendex, SourceHardcodedRecords, SourceHarness, SourceHarvest, SourceHeight, SourceHellobaton, SourceHelpScout, SourceHibob, SourceHighLevel, SourceHoorayhr, SourceHubplanner, SourceHubspot, SourceHuggingFaceDatasets, SourceHumanitix, SourceHuntr, SourceIlluminaBasespace, SourceImagga, SourceIncidentIo, SourceInflowinventory, SourceInsightful, SourceInsightly, SourceInstagram, SourceInstatus, SourceIntercom, SourceIntruder, SourceInvoiced, SourceInvoiceninja, SourceIp2whois, SourceIterable, SourceJamfPro, SourceJira, SourceJobnimbus, SourceJotform, SourceJudgeMeReviews, SourceJustSift, SourceJustcall, SourceK6Cloud, SourceKatana, SourceKeka, SourceKisi, SourceKissmetrics, SourceKlarna, SourceKlausAPI, SourceKlaviyo, SourceKyve, SourceLaunchdarkly, SourceLeadfeeder, SourceLemlist, SourceLessAnnoyingCrm, SourceLeverHiring, SourceLightspeedRetail, SourceLinear, SourceLinkedinAds, SourceLinkedinPages, SourceLinnworks, SourceLob, SourceLokalise, SourceLooker, SourceLuma, SourceMailchimp, SourceMailerlite, SourceMailersend, SourceMailgun, SourceMailjetMail, SourceMailjetSms, SourceMailosaur, SourceMailtrap, SourceMarketo, SourceMarketstack, SourceMendeley, SourceMention, SourceMercadoAds, SourceMerge, SourceMetabase, SourceMicrosoftDataverse, SourceMicrosoftEntraID, SourceMicrosoftLists, SourceMicrosoftOnedrive, SourceMicrosoftSharepoint, SourceMicrosoftTeams, SourceMiro, SourceMissive, SourceMixmax, SourceMixpanel, SourceMode, 
SourceMonday, SourceMongodbV2, SourceMssql, SourceMux, SourceMyHours, SourceMysql, SourceN8n, SourceNasa, SourceNavan, SourceNebiusAi, SourceNetsuite, SourceNetsuiteEnterprise, SourceNewsAPI, SourceNewsdata, SourceNewsdataIo, SourceNexiopay, SourceNinjaoneRmm, SourceNocrm, SourceNorthpassLms, SourceNotion, SourceNutshell, SourceNylas, SourceNytimes, SourceOkta, SourceOmnisend, SourceOncehub, SourceOnepagecrm, SourceOnesignal, SourceOnfleet, SourceOpenDataDc, SourceOpenExchangeRates, SourceOpenaq, SourceOpenfda, SourceOpenweather, SourceOpinionStage, SourceOpsgenie, SourceOpuswatch, SourceOracle, SourceOracleEnterprise, SourceOrb, SourceOura, SourceOutbrainAmplify, SourceOutreach, SourceOveit, SourcePabblySubscriptionsBilling, SourcePaddle, SourcePagerduty, SourcePandadoc, SourcePaperform, SourcePapersign, SourcePardot, SourcePartnerize, SourcePartnerstack, SourcePayfit, SourcePaypalTransaction, SourcePaystack, SourcePendo, SourcePennylane, SourcePerigon, SourcePersistiq, SourcePersona, SourcePexelsAPI, SourcePhyllo, SourcePicqer, SourcePingdom, SourcePinterest, SourcePipedrive, SourcePipeliner, SourcePivotalTracker, SourcePiwik, SourcePlaid, SourcePlanhat, SourcePlausible, SourcePocket, SourcePokeapi, SourcePolygonStockAPI, SourcePoplar, SourcePostgres, SourcePosthog, SourcePostmarkapp, SourcePrestashop, SourcePretix, SourcePrimetric, SourcePrintify, SourceProductboard, SourceProductive, SourcePypi, SourceQualaroo, SourceQuickbooks, SourceRailz, SourceRdStationMarketing, SourceRecharge, SourceRecreation, SourceRecruitee, SourceRecurly, SourceReddit, SourceRedshift, SourceReferralhero, SourceRentcast, SourceRepairshopr, SourceReplyIo, SourceRetailexpressByMaropost, SourceRetently, SourceRevenuecat, SourceRevolutMerchant, SourceRingcentral, SourceRkiCovid, SourceRocketChat, SourceRocketlane, SourceRollbar, SourceRootly, SourceRss, SourceRuddr, SourceS3, SourceSafetyculture, SourceSageHr, SourceSalesflare, SourceSalesforce, SourceSalesloft, SourceSapFieldglass, 
SourceSapHanaEnterprise, SourceSavvycal, SourceScryfall, SourceSecoda, SourceSegment, SourceSendgrid, SourceSendinblue, SourceSendowl, SourceSendpulse, SourceSenseforce, SourceSentry, SourceSerpstat, SourceServiceNow, SourceSftp, SourceSftpBulk, SourceSharepointEnterprise, SourceSharetribe, SourceShippo, SourceShipstation, SourceShopify, SourceShopwired, SourceShortcut, SourceShortio, SourceShutterstock, SourceSigmaComputing, SourceSignnow, SourceSimfin, SourceSimplecast, SourceSimplesat, SourceSlack, SourceSmaily, SourceSmartengage, SourceSmartreach, SourceSmartsheets, SourceSmartwaiver, SourceSnapchatMarketing, SourceSnowflake, SourceSolarwindsServiceDesk, SourceSonarCloud, SourceSpacexAPI, SourceSparkpost, SourceSplitIo, SourceSpotifyAds, SourceSpotlercrm, SourceSquare, SourceSquarespace, SourceStatsig, SourceStatuspage, SourceStockdata, SourceStrava, SourceStripe, SourceSurveySparrow, SourceSurveymonkey, SourceSurvicate, SourceSvix, SourceSysteme, SourceTaboola, SourceTavus, SourceTeamtailor, SourceTeamwork, SourceTempo, SourceTestrail, SourceTheGuardianAPI, SourceThinkific, SourceThinkificCourses, SourceThriveLearning, SourceTicketmaster, SourceTickettailor, SourceTiktokMarketing, SourceTimely, SourceTinyemail, SourceTmdb, SourceTodoist, SourceToggl, SourceTrackPms, SourceTrello, SourceTremendous, SourceTrustpilot, SourceTvmazeSchedule, SourceTwelveData, SourceTwilio, SourceTwilioTaskrouter, SourceTwitter, SourceTyntecSms, SourceTypeform, SourceUbidots, SourceUnleash, SourceUppromote, SourceUptick, SourceUsCensus, SourceUservoice, SourceVantage, SourceVeeqo, SourceVercel, SourceVismaEconomic, SourceVitally, SourceVwo, SourceWaiteraid, SourceWasabiStatsAPI, SourceWatchmode, SourceWeatherstack, SourceWebScrapper, SourceWebflow, SourceWhenIWork, SourceWhiskyHunter, SourceWikipediaPageviews, SourceWoocommerce, SourceWordpress, SourceWorkable, SourceWorkday, SourceWorkflowmax, SourceWorkramp, SourceWrike, SourceWufoo, SourceXkcd, SourceXsolla, 
SourceYahooFinancePrice, SourceYandexMetrica, SourceYotpo, SourceYouNeedABudgetYnab, SourceYounium, SourceYousign, SourceYoutubeAnalytics, SourceYoutubeData, SourceZapierSupportedStorage, SourceZapsign, SourceZendeskChat, SourceZendeskSunshine, SourceZendeskSupport, SourceZendeskTalk, SourceZenefits, SourceZenloop, SourceZohoAnalyticsMetadataAPI, SourceZohoBigin, SourceZohoBilling, SourceZohoBooks, SourceZohoCampaign, SourceZohoCrm, SourceZohoDesk, SourceZohoExpense, SourceZohoInventory, SourceZohoInvoice, SourceZonkaFeedback, SourceZoom] +SourceConfiguration = Union[SourceAha, Source100ms, Source7shifts, SourceActivecampaign, SourceAcuityScheduling, SourceAdobeCommerceMagento, SourceAgilecrm, SourceAirbyte, SourceAircall, SourceAirtable, SourceAkeneo, SourceAlgolia, SourceAlpacaBrokerAPI, SourceAlphaVantage, SourceAmazonAds, SourceAmazonSellerPartner, SourceAmazonSqs, SourceAmplitude, SourceApifyDataset, SourceAppcues, SourceAppfigures, SourceAppfollow, SourceAppleSearchAds, SourceAppsflyer, SourceApptivo, SourceAsana, SourceAshby, SourceAssemblyai, SourceAuth0, SourceAviationstack, SourceAwinAdvertiser, SourceAwsCloudtrail, SourceAzureBlobStorage, SourceAzureTable, SourceBabelforce, SourceBambooHr, SourceBasecamp, SourceBeamer, SourceBigmailer, SourceBigquery, SourceBingAds, SourceBitly, SourceBlogger, SourceBluetally, SourceBoldsign, SourceBox, SourceBraintree, SourceBraze, SourceBreezometer, SourceBreezyHr, SourceBrevo, SourceBrex, SourceBugsnag, SourceBuildkite, SourceBunnyInc, SourceBuzzsprout, SourceCalCom, SourceCalendly, SourceCallrail, SourceCampaignMonitor, SourceCampayn, SourceCanny, SourceCapsuleCrm, SourceCaptainData, SourceCareQualityCommission, SourceCart, SourceCastorEdc, SourceChameleon, SourceChargebee, SourceChargedesk, SourceChargify, SourceChartmogul, SourceChurnkey, SourceCimis, SourceCin7, SourceCirca, SourceCircleci, SourceCiscoMeraki, SourceClarifAi, SourceClazar, SourceClickhouse, SourceClickupAPI, SourceClockify, SourceClockodo, 
SourceCloseCom, SourceCloudbeds, SourceCoassemble, SourceCoda, SourceCodefresh, SourceCoinAPI, SourceCoingeckoCoins, SourceCoinmarketcap, SourceConcord, SourceConfigcat, SourceConfluence, SourceConvertkit, SourceConvex, SourceCopper, SourceCouchbase, SourceCountercyclical, SourceCustomerIo, SourceCustomerly, SourceDatadog, SourceDatagen, SourceDatascope, SourceDb2Enterprise, SourceDbt, SourceDefillama, SourceDelighted, SourceDeputy, SourceDingConnect, SourceDixa, SourceDockerhub, SourceDocuseal, SourceDolibarr, SourceDremio, SourceDrift, SourceDrip, SourceDropboxSign, SourceDwolla, SourceDynamodb, SourceEConomic, SourceEasypost, SourceEasypromos, SourceEbayFinance, SourceEbayFulfillment, SourceElasticemail, SourceElasticsearch, SourceEmailoctopus, SourceEmploymentHero, SourceEncharge, SourceEventbrite, SourceEventee, SourceEventzilla, SourceEverhour, SourceExchangeRates, SourceEzofficeinventory, SourceFacebookMarketing, SourceFacebookPages, SourceFactorial, SourceFaker, SourceFastbill, SourceFastly, SourceFauna, SourceFile, SourceFillout, SourceFinage, SourceFinancialModelling, SourceFinnhub, SourceFinnworlds, SourceFirebolt, SourceFirehydrant, SourceFleetio, SourceFlexmail, SourceFlexport, SourceFloat, SourceFlowlu, SourceFormbricks, SourceFreeAgentConnector, SourceFreightview, SourceFreshbooks, SourceFreshcaller, SourceFreshchat, SourceFreshdesk, SourceFreshsales, SourceFreshservice, SourceFront, SourceFulcrum, SourceFullstory, SourceGainsightPx, SourceGcs, SourceGetgist, SourceGetlago, SourceGiphy, SourceGitbook, SourceGithub, SourceGitlab, SourceGlassfrog, SourceGmail, SourceGnews, SourceGocardless, SourceGoldcast, SourceGologin, SourceGong, SourceGoogleAds, SourceGoogleAnalyticsDataAPI, SourceGoogleCalendar, SourceGoogleClassroom, SourceGoogleDirectory, SourceGoogleDrive, SourceGoogleForms, SourceGooglePagespeedInsights, SourceGoogleSearchConsole, SourceGoogleSheets, SourceGoogleTasks, SourceGoogleWebfonts, SourceGorgias, SourceGreenhouse, SourceGreythr, 
SourceGridly, SourceGuru, SourceGutendex, SourceHardcodedRecords, SourceHarness, SourceHarvest, SourceHeight, SourceHellobaton, SourceHelpScout, SourceHibob, SourceHighLevel, SourceHoorayhr, SourceHubplanner, SourceHubspot, SourceHuggingFaceDatasets, SourceHumanitix, SourceHuntr, SourceIlluminaBasespace, SourceImagga, SourceIncidentIo, SourceInflowinventory, SourceInsightful, SourceInsightly, SourceInstagram, SourceInstatus, SourceIntercom, SourceIntruder, SourceInvoiced, SourceInvoiceninja, SourceIp2whois, SourceIterable, SourceJamfPro, SourceJira, SourceJobnimbus, SourceJotform, SourceJudgeMeReviews, SourceJustSift, SourceJustcall, SourceK6Cloud, SourceKatana, SourceKeka, SourceKisi, SourceKissmetrics, SourceKlarna, SourceKlausAPI, SourceKlaviyo, SourceKyve, SourceLaunchdarkly, SourceLeadfeeder, SourceLemlist, SourceLessAnnoyingCrm, SourceLeverHiring, SourceLightspeedRetail, SourceLinear, SourceLinkedinAds, SourceLinkedinPages, SourceLinnworks, SourceLob, SourceLokalise, SourceLooker, SourceLuma, SourceMailchimp, SourceMailerlite, SourceMailersend, SourceMailgun, SourceMailjetMail, SourceMailjetSms, SourceMailosaur, SourceMailtrap, SourceMantle, SourceMarketo, SourceMarketstack, SourceMendeley, SourceMention, SourceMercadoAds, SourceMerge, SourceMetabase, SourceMetricool, SourceMicrosoftDataverse, SourceMicrosoftEntraID, SourceMicrosoftLists, SourceMicrosoftOnedrive, SourceMicrosoftSharepoint, SourceMicrosoftTeams, SourceMiro, SourceMissive, SourceMixmax, SourceMixpanel, SourceMode, SourceMonday, SourceMongodbV2, SourceMssql, SourceMux, SourceMyHours, SourceMysql, SourceN8n, SourceNasa, SourceNavan, SourceNebiusAi, SourceNetsuite, SourceNetsuiteEnterprise, SourceNewsAPI, SourceNewsdata, SourceNewsdataIo, SourceNexiopay, SourceNinjaoneRmm, SourceNocrm, SourceNorthpassLms, SourceNotion, SourceNutshell, SourceNylas, SourceNytimes, SourceOkta, SourceOmnisend, SourceOncehub, SourceOnepagecrm, SourceOnesignal, SourceOnfleet, SourceOpenDataDc, SourceOpenExchangeRates, 
SourceOpenaq, SourceOpenfda, SourceOpenweather, SourceOpinionStage, SourceOpsgenie, SourceOpuswatch, SourceOracle, SourceOracleEnterprise, SourceOrb, SourceOura, SourceOutbrainAmplify, SourceOutlook, SourceOutreach, SourceOveit, SourcePabblySubscriptionsBilling, SourcePaddle, SourcePagerduty, SourcePandadoc, SourcePaperform, SourcePapersign, SourcePardot, SourcePartnerize, SourcePartnerstack, SourcePayfit, SourcePaypalTransaction, SourcePaystack, SourcePendo, SourcePennylane, SourcePerigon, SourcePersistiq, SourcePersona, SourcePexelsAPI, SourcePhyllo, SourcePicqer, SourcePingdom, SourcePinterest, SourcePipedrive, SourcePipeliner, SourcePivotalTracker, SourcePiwik, SourcePlaid, SourcePlanhat, SourcePlausible, SourcePocket, SourcePokeapi, SourcePolygonStockAPI, SourcePoplar, SourcePostgres, SourcePosthog, SourcePostmarkapp, SourcePrestashop, SourcePretix, SourcePrimetric, SourcePrintify, SourceProductboard, SourceProductive, SourcePypi, SourceQualaroo, SourceQuickbooks, SourceRailz, SourceRdStationMarketing, SourceRecharge, SourceRecreation, SourceRecruitee, SourceRecurly, SourceReddit, SourceRedshift, SourceReferralhero, SourceRentcast, SourceRepairshopr, SourceReplyIo, SourceRetailexpressByMaropost, SourceRetently, SourceRevenuecat, SourceRevolutMerchant, SourceRingcentral, SourceRkiCovid, SourceRocketChat, SourceRocketlane, SourceRollbar, SourceRootly, SourceRss, SourceRuddr, SourceS3, SourceSafetyculture, SourceSageHr, SourceSalesflare, SourceSalesforce, SourceSalesloft, SourceSapFieldglass, SourceSapHanaEnterprise, SourceSavvycal, SourceScryfall, SourceSecoda, SourceSegment, SourceSendgrid, SourceSendinblue, SourceSendowl, SourceSendpulse, SourceSenseforce, SourceSentry, SourceSerpstat, SourceServiceNow, SourceSftp, SourceSftpBulk, SourceSharepointEnterprise, SourceSharetribe, SourceShippo, SourceShipstation, SourceShopify, SourceShopwired, SourceShortcut, SourceShortio, SourceShutterstock, SourceSigmaComputing, SourceSignnow, SourceSimfin, SourceSimplecast, 
SourceSimplesat, SourceSlack, SourceSmaily, SourceSmartengage, SourceSmartreach, SourceSmartsheets, SourceSmartwaiver, SourceSnapchatMarketing, SourceSnowflake, SourceSolarwindsServiceDesk, SourceSonarCloud, SourceSpacexAPI, SourceSparkpost, SourceSplitIo, SourceSpotifyAds, SourceSpotlercrm, SourceSquare, SourceSquarespace, SourceStatsig, SourceStatuspage, SourceStockdata, SourceStrava, SourceStripe, SourceSurveySparrow, SourceSurveymonkey, SourceSurvicate, SourceSvix, SourceSysteme, SourceTaboola, SourceTavus, SourceTeamtailor, SourceTeamwork, SourceTempo, SourceTestrail, SourceTheGuardianAPI, SourceThinkific, SourceThinkificCourses, SourceThriveLearning, SourceTicketmaster, SourceTickettailor, SourceTicktick, SourceTiktokMarketing, SourceTimely, SourceTinyemail, SourceTmdb, SourceTodoist, SourceToggl, SourceTrackPms, SourceTrello, SourceTremendous, SourceTrustpilot, SourceTvmazeSchedule, SourceTwelveData, SourceTwilio, SourceTwilioTaskrouter, SourceTwitter, SourceTyntecSms, SourceTypeform, SourceUbidots, SourceUnleash, SourceUppromote, SourceUptick, SourceUsCensus, SourceUservoice, SourceVantage, SourceVeeqo, SourceVercel, SourceVismaEconomic, SourceVitally, SourceVwo, SourceWaiteraid, SourceWasabiStatsAPI, SourceWatchmode, SourceWeatherstack, SourceWebScrapper, SourceWebflow, SourceWhenIWork, SourceWhiskyHunter, SourceWikipediaPageviews, SourceWoocommerce, SourceWordpress, SourceWorkable, SourceWorkday, SourceWorkdayRest, SourceWorkflowmax, SourceWorkramp, SourceWrike, SourceWufoo, SourceXkcd, SourceXsolla, SourceYahooFinancePrice, SourceYandexMetrica, SourceYotpo, SourceYouNeedABudgetYnab, SourceYounium, SourceYousign, SourceYoutubeAnalytics, SourceYoutubeData, SourceZapierSupportedStorage, SourceZapsign, SourceZendeskChat, SourceZendeskSunshine, SourceZendeskSupport, SourceZendeskTalk, SourceZenefits, SourceZenloop, SourceZohoAnalyticsMetadataAPI, SourceZohoBigin, SourceZohoBilling, SourceZohoBooks, SourceZohoCampaign, SourceZohoCrm, SourceZohoDesk, 
SourceZohoExpense, SourceZohoInventory, SourceZohoInvoice, SourceZonkaFeedback, SourceZoom] diff --git a/src/airbyte_api/models/streammappertype.py b/src/airbyte_api/models/streammappertype.py index 4b5d8b75..720671d8 100644 --- a/src/airbyte_api/models/streammappertype.py +++ b/src/airbyte_api/models/streammappertype.py @@ -9,3 +9,4 @@ class StreamMapperType(str, Enum): FIELD_RENAMING = 'field-renaming' ROW_FILTERING = 'row-filtering' ENCRYPTION = 'encryption' + FIELD_FILTERING = 'field-filtering' diff --git a/src/airbyte_api/models/ticktick.py b/src/airbyte_api/models/ticktick.py new file mode 100644 index 00000000..001f10c2 --- /dev/null +++ b/src/airbyte_api/models/ticktick.py @@ -0,0 +1,26 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from typing import Optional + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class TicktickAuthorization: + client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id'), 'exclude': lambda f: f is None }}) + r"""The client ID of your Ticktick application. Read more here.""" + client_secret: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret'), 'exclude': lambda f: f is None }}) + r"""The client secret of of your Ticktick application. application. 
Read more here.""" + + + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class Ticktick: + authorization: Optional[TicktickAuthorization] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('authorization'), 'exclude': lambda f: f is None }}) + + diff --git a/src/airbyte_api/sdkconfiguration.py b/src/airbyte_api/sdkconfiguration.py index 89b8fe98..99158c05 100644 --- a/src/airbyte_api/sdkconfiguration.py +++ b/src/airbyte_api/sdkconfiguration.py @@ -24,9 +24,9 @@ class SDKConfiguration: server_idx: Optional[int] = 0 language: str = 'python' openapi_doc_version: str = '1.0.0' - sdk_version: str = '0.53.0' + sdk_version: str = '0.53.1' gen_version: str = '2.536.0' - user_agent: str = 'speakeasy-sdk/python 0.53.0 2.536.0 1.0.0 airbyte-api' + user_agent: str = 'speakeasy-sdk/python 0.53.1 2.536.0 1.0.0 airbyte-api' retry_config: Optional[RetryConfig] = None def __post_init__(self):