From 58b6b800e39f51a642070e3a0049178670984b4f Mon Sep 17 00:00:00 2001 From: Patrick Nilan Date: Wed, 13 Mar 2024 13:18:45 -0700 Subject: [PATCH] Source Chargebee - Implement integration testing for otherwise untested streams [ITAS] (#35509) --- .../acceptance-test-config.yml | 18 +- .../integration_tests/configured_catalog.json | 17 +- .../integration_tests/expected_records.jsonl | 9 +- .../integration_tests/future_state.json | 21 ++ .../integration_tests/sample_state.json | 7 + .../connectors/source-chargebee/metadata.yaml | 2 +- .../connectors/source-chargebee/poetry.lock | 70 +++--- .../source-chargebee/pyproject.toml | 5 +- .../source_chargebee/manifest.yaml | 9 +- .../unit_tests/integration/__init__.py | 0 .../unit_tests/integration/config.py | 33 +++ .../unit_tests/integration/pagination.py | 11 + .../unit_tests/integration/request_builder.py | 127 +++++++++++ .../integration/response_builder.py | 14 ++ .../unit_tests/integration/test_addon.py | 201 +++++++++++++++++ .../unit_tests/integration/test_coupon.py | 212 +++++++++++++++++ .../unit_tests/integration/test_customer.py | 211 +++++++++++++++++ .../unit_tests/integration/test_event.py | 197 ++++++++++++++++ .../integration/test_hosted_page.py | 196 ++++++++++++++++ .../unit_tests/integration/test_plan.py | 201 +++++++++++++++++ .../integration/test_site_migration_detail.py | 213 ++++++++++++++++++ .../integration/test_subscription.py | 212 +++++++++++++++++ .../integration/test_virtual_bank_account.py | 201 +++++++++++++++++ .../resource/http/response/400.json | 7 + .../resource/http/response/401.json | 7 + .../resource/http/response/403.json | 7 + .../resource/http/response/429.json | 7 + .../resource/http/response/500.json | 7 + .../resource/http/response/addon.json | 33 +++ .../resource/http/response/coupon.json | 27 +++ .../resource/http/response/customer.json | 28 +++ .../resource/http/response/event.json | 204 +++++++++++++++++ .../resource/http/response/hosted_page.json | 18 ++ .../resource/http/response/plan.json | 37 +++ .../http/response/site_migration_detail.json | 15 ++ .../resource/http/response/subscription.json | 63 ++++++ .../http/response/virtual_bank_account.json | 24 ++ docs/integrations/sources/chargebee.md | 1 + 38 files changed, 2618 insertions(+), 54 deletions(-) create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/integration/__init__.py create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/integration/config.py create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/integration/pagination.py create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/integration/request_builder.py create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/integration/response_builder.py create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_addon.py create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_coupon.py create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_customer.py create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_event.py create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_hosted_page.py create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_plan.py create mode 100644 
airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_site_migration_detail.py create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_subscription.py create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_virtual_bank_account.py create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/400.json create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/401.json create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/403.json create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/429.json create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/500.json create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/addon.json create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/coupon.json create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/customer.json create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/event.json create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/hosted_page.json create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/plan.json create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/site_migration_detail.json create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/subscription.json create mode 100644 airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/virtual_bank_account.json diff --git a/airbyte-integrations/connectors/source-chargebee/acceptance-test-config.yml b/airbyte-integrations/connectors/source-chargebee/acceptance-test-config.yml index 850da25aec466..a1be4b948cf4c 100644 --- a/airbyte-integrations/connectors/source-chargebee/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-chargebee/acceptance-test-config.yml @@ -23,23 +23,23 @@ acceptance_tests: timeout_seconds: 1200 empty_streams: - name: "addon" - bypass_reason: "Not available for Product Catalog 2.0 sites." + bypass_reason: "Not available for Product Catalog 2.0 sites. Tested with mock server tests." - name: "plan" - bypass_reason: "Not available for Product Catalog 2.0 sites." + bypass_reason: "Not available for Product Catalog 2.0 sites. Tested with mock server tests." - name: "virtual_bank_account" - bypass_reason: "Cannot populate with test data" + bypass_reason: "Tested with mock server tests." - name: "event" - bypass_reason: "Unstable data. Test data is not persistent." + bypass_reason: "Unstable data. Test data is not persistent. Tested with mock server tests." - name: "site_migration_detail" - bypass_reason: "Cannot populate with test data." + bypass_reason: "Cannot populate with test data." - name: "customer" - bypass_reason: "To be tested with integration tests." + bypass_reason: "Tested with mock server tests." - name: "subscription" - bypass_reason: "To be tested with integration tests." + bypass_reason: "Tested with mock server tests." - name: "coupon" - bypass_reason: "To be tested with integration tests."
+ bypass_reason: "Tested with mocker server tests." - name: "hosted_page" - bypass_reason: "To be tested with integration tests." + bypass_reason: "Tested with mocker server tests." expect_records: path: "integration_tests/expected_records.jsonl" exact_order: no diff --git a/airbyte-integrations/connectors/source-chargebee/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-chargebee/integration_tests/configured_catalog.json index 8adc9742557e9..1f502d4c73953 100644 --- a/airbyte-integrations/connectors/source-chargebee/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-chargebee/integration_tests/configured_catalog.json @@ -121,11 +121,11 @@ "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, "source_defined_primary_key": [["id"]], - "default_cursor_field": ["created_at"] + "default_cursor_field": ["updated_at"] }, "sync_mode": "incremental", "destination_sync_mode": "append", - "cursor_field": ["created_at"] + "cursor_field": ["updated_at"] }, { "stream": { @@ -227,6 +227,19 @@ "sync_mode": "incremental", "destination_sync_mode": "append", "cursor_field": ["updated_at"] + }, + { + "stream": { + "name": "site_migration_detail", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["migrated_at"], + "source_defined_primary_key": [["entity_id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append", + "cursor_field": ["migrated_at"] } ] } diff --git a/airbyte-integrations/connectors/source-chargebee/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-chargebee/integration_tests/expected_records.jsonl index 4576e894a548c..738efeb22afa9 100644 --- a/airbyte-integrations/connectors/source-chargebee/integration_tests/expected_records.jsonl +++ b/airbyte-integrations/connectors/source-chargebee/integration_tests/expected_records.jsonl @@ -7,10 +7,6 @@ {"stream": "item", "data": {"id": "cbdemo_advanced", "name": "Advanced", "external_name": "Advanced", "description": "Uncover hidden insights and carry out deeper analytics for your enterprise with this advanced plan.", "status": "active", "resource_version": 1674035640445, "updated_at": 1674035640, "item_family_id": "cbdemo_pf_analytics", "type": "plan", "is_shippable": true, "is_giftable": false, "enabled_for_checkout": true, "enabled_in_portal": true, "item_applicability": "all", "metered": false, "channel": "web", "metadata": {}, "object": "item", "custom_fields": []}, "emitted_at": 1678971136879} {"stream": "item", "data": {"id": "cbdemo_basic", "name": "Basic", "external_name": "Basic", "description": "Starter plan for all your basic reporting requirements.", "status": "active", "resource_version": 1674035673162, "updated_at": 1674035673, "item_family_id": "cbdemo_pf_analytics", "type": "plan", "is_shippable": true, "is_giftable": false, "enabled_for_checkout": true, "enabled_in_portal": true, "item_applicability": "all", "metered": false, "channel": "web", "metadata": {}, "object": "item", "custom_fields": []}, "emitted_at": 1678971136891} {"stream": "item", "data": {"id": "cbdemo_intermediary", "name": "Intermediary", "external_name": "Intermediary", "description": "Smart plan with the right mix of basic and slightly advanced reporting tools.", "status": "active", "resource_version": 1674035686971, "updated_at": 1674035686, "item_family_id": "cbdemo_pf_analytics", "type": "plan", "is_shippable": true, 
"is_giftable": false, "enabled_for_checkout": true, "enabled_in_portal": true, "item_applicability": "all", "metered": false, "channel": "web", "metadata": {}, "object": "item", "custom_fields": []}, "emitted_at": 1678971136900} -{"stream": "attached_item", "data": {"id": "e49c6ed7-9f1b-4c79-9235-549ce8ae9a1f", "parent_item_id": "cbdemo_advanced", "item_id": "cbdemo_setup_charge", "status": "active", "charge_on_event": "subscription_trial_start", "charge_once": false, "created_at": 1674032839, "resource_version": 1674032839573, "updated_at": 1674032839, "object": "attached_item", "custom_fields": []}, "emitted_at": 1676569205846} -{"stream": "attached_item", "data": {"id": "25976ccf-8e44-4fce-8eab-2a1658eb0a2b", "parent_item_id": "cbdemo_advanced", "item_id": "cbdemo_analytics_additionalusers", "type": "mandatory", "status": "active", "quantity": 1, "created_at": 1674032827, "resource_version": 1674032827801, "updated_at": 1674032827, "object": "attached_item", "custom_fields": []}, "emitted_at": 1676569205849} -{"stream": "attached_item", "data": {"id": "69b451b1-e00a-4522-ab6f-027586d24b85", "parent_item_id": "cbdemo_basic", "item_id": "cbdemo_setup_charge", "status": "active", "charge_on_event": "subscription_creation", "charge_once": false, "created_at": 1674032880, "resource_version": 1674032880261, "updated_at": 1674032880, "object": "attached_item", "custom_fields": []}, "emitted_at": 1676569206020} -{"stream": "item_price", "data": {"id": "Test-Plan-1-USD-Daily", "name": "Test Plan 1 USD Daily", "item_family_id": "cbdemo_pf_analytics", "item_id": "Test-Plan-1", "description": "Test", "status": "active", "external_name": "Test Plan 1", "pricing_model": "flat_fee", "price": 1000, "period": 1, "currency_code": "USD", "period_unit": "day", "shipping_period": 1, "shipping_period_unit": "day", "free_quantity": 0, "channel": "web", "resource_version": 1674036400224, "updated_at": 1674036400, "created_at": 1674036400, "invoice_notes": "Test", "is_taxable": true, "item_type": "plan", "show_description_in_invoices": true, "show_description_in_quotes": true, "object": "item_price", "custom_fields": []}, "emitted_at": 1678971392306} {"stream": "item_price", "data": {"id": "Test-Gift-Plan-1-USD-Daily", "name": "Test Gift Plan 1 USD Daily", "item_family_id": "cbdemo_pf_crm", "item_id": "Test-Gift-Plan-1", "description": "Test gift", "status": "active", "external_name": "Test Gift Plan 1", "pricing_model": "flat_fee", "price": 1500, "period": 1, "currency_code": "USD", "period_unit": "day", "shipping_period": 1, "shipping_period_unit": "day", "billing_cycles": 1, "free_quantity": 0, "channel": "web", "resource_version": 1674055340456, "updated_at": 1674055340, "created_at": 1674055340, "invoice_notes": "Test gift", "is_taxable": true, "item_type": "plan", "show_description_in_invoices": true, "show_description_in_quotes": true, "object": "item_price", "custom_fields": []}, "emitted_at": 1678971392312} {"stream": "item_price", "data": {"id": "Test-Gift-Plan-1-USD-Weekly", "name": "Test Gift Plan 1 USD Weekly", "item_family_id": "cbdemo_pf_crm", "item_id": "Test-Gift-Plan-1", "description": "Test", "status": "active", "external_name": "Test Gift Plan 1", "pricing_model": "flat_fee", "price": 20000, "period": 1, "currency_code": "USD", "period_unit": "week", "shipping_period": 1, "shipping_period_unit": "week", "billing_cycles": 1, "free_quantity": 0, "channel": "web", "resource_version": 1674056134136, "updated_at": 1674056134, "created_at": 1674056134, "is_taxable": true, "item_type": "plan", 
"show_description_in_invoices": true, "show_description_in_quotes": true, "object": "item_price", "custom_fields": []}, "emitted_at": 1678971392319} {"stream": "payment_source", "data": {"id": "pm_Azz5jBTTJ96QflvC", "updated_at": 1674057604, "resource_version": 1674057604123, "deleted": false, "object": "payment_source", "customer_id": "Azz5jBTTJ96Mjlv5", "type": "card", "reference_id": "tok_Azz5jBTTJ96QSlvA", "status": "valid", "gateway": "chargebee", "gateway_account_id": "gw_16CKmRSb2oGddH4", "ip_address": "85.209.47.207", "created_at": 1674057604, "card": {"iin": "411111", "last4": "1111", "funding_type": "credit", "expiry_month": 12, "expiry_year": 2029, "masked_number": "************1111", "object": "card", "brand": "visa"}, "custom_fields": []}, "emitted_at": 1678971627515} @@ -45,4 +41,7 @@ {"stream": "item_family", "data": {"id": "test-4", "name": "test item family 4", "status": "active", "resource_version": 1705960880668, "updated_at": 1705960880, "object": "item_family", "custom_fields": []}, "emitted_at": 1705960929497} {"stream": "item_family", "data": {"id": "test-3", "name": "test item family 3", "status": "active", "resource_version": 1705956309899, "updated_at": 1705956309, "object": "item_family", "custom_fields": []}, "emitted_at": 1705960929501} {"stream": "item_family", "data": {"id": "test-2", "name": "test item family 2", "status": "active", "resource_version": 1705956286577, "updated_at": 1705956286, "object": "item_family", "custom_fields": []}, "emitted_at": 1705960929506} -{"stream": "item_family", "data": {"id": "test-1", "name": "test item family 1", "status": "active", "resource_version": 1705956260965, "updated_at": 1705956260, "object": "item_family", "custom_fields": []}, "emitted_at": 1705960929509} \ No newline at end of file +{"stream": "item_family", "data": {"id": "test-1", "name": "test item family 1", "status": "active", "resource_version": 1705956260965, "updated_at": 1705956260, "object": "item_family", "custom_fields": []}, "emitted_at": 1705960929509} +{"stream": "attached_item", "data": {"id": "25976ccf-8e44-4fce-8eab-2a1658eb0a2b", "parent_item_id": "cbdemo_advanced", "item_id": "cbdemo_analytics_additionalusers", "type": "mandatory", "status": "active", "quantity": 1, "created_at": 1674032827, "resource_version": 1674032827801, "updated_at": 1674032827, "object": "attached_item", "custom_fields": []}, "emitted_at": 1708468907178} +{"stream": "attached_item", "data": {"id": "e49c6ed7-9f1b-4c79-9235-549ce8ae9a1f", "parent_item_id": "cbdemo_advanced", "item_id": "cbdemo_setup_charge", "status": "active", "charge_on_event": "subscription_trial_start", "charge_once": false, "created_at": 1674032839, "resource_version": 1674032839573, "updated_at": 1674032839, "object": "attached_item", "custom_fields": []}, "emitted_at": 1708468907177} +{"stream": "attached_item", "data": {"id": "69b451b1-e00a-4522-ab6f-027586d24b85", "parent_item_id": "cbdemo_basic", "item_id": "cbdemo_setup_charge", "status": "active", "charge_on_event": "subscription_creation", "charge_once": false, "created_at": 1674032880, "resource_version": 1674032880261, "updated_at": 1674032880, "object": "attached_item", "custom_fields": []}, "emitted_at": 1708468907297} diff --git a/airbyte-integrations/connectors/source-chargebee/integration_tests/future_state.json b/airbyte-integrations/connectors/source-chargebee/integration_tests/future_state.json index 24299a54ae846..73696eb5b8e58 100644 --- a/airbyte-integrations/connectors/source-chargebee/integration_tests/future_state.json +++ 
b/airbyte-integrations/connectors/source-chargebee/integration_tests/future_state.json @@ -48,6 +48,13 @@ "stream_descriptor": { "name": "item" } } }, + { + "type": "STREAM", + "stream": { + "stream_state": { "updated_at": 2147483647 }, + "stream_descriptor": { "name": "attached_item" } + } + }, { "type": "STREAM", "stream": { @@ -124,5 +131,19 @@ "stream_state": { "updated_at": 2147483647 }, "stream_descriptor": { "name": "differential_price" } } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { "migrated_at": 2147483647 }, + "stream_descriptor": { "name": "site_migration_detail" } + } + }, + { + "type": "STREAM", + "stream": { + "stream_state": { "occurred_at": 2147483647 }, + "stream_descriptor": { "name": "event" } + } } ] diff --git a/airbyte-integrations/connectors/source-chargebee/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-chargebee/integration_tests/sample_state.json index f8a43e7e1d9ab..49e8b43ee928a 100644 --- a/airbyte-integrations/connectors/source-chargebee/integration_tests/sample_state.json +++ b/airbyte-integrations/connectors/source-chargebee/integration_tests/sample_state.json @@ -48,6 +48,13 @@ "stream_descriptor": { "name": "item" } } }, + { + "type": "STREAM", + "stream": { + "stream_state": { "updated_at": 1625596058 }, + "stream_descriptor": { "name": "attached_item" } + } + }, { "type": "STREAM", "stream": { diff --git a/airbyte-integrations/connectors/source-chargebee/metadata.yaml b/airbyte-integrations/connectors/source-chargebee/metadata.yaml index 82b51e27cfae8..380ee9e33365d 100644 --- a/airbyte-integrations/connectors/source-chargebee/metadata.yaml +++ b/airbyte-integrations/connectors/source-chargebee/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: api connectorType: source definitionId: 686473f1-76d9-4994-9cc7-9b13da46147c - dockerImageTag: 0.4.0 + dockerImageTag: 0.4.1 dockerRepository: airbyte/source-chargebee documentationUrl: https://docs.airbyte.com/integrations/sources/chargebee githubIssueLabel: source-chargebee diff --git a/airbyte-integrations/connectors/source-chargebee/poetry.lock b/airbyte-integrations/connectors/source-chargebee/poetry.lock index 4f28624f1368a..df40d58edfcf4 100644 --- a/airbyte-integrations/connectors/source-chargebee/poetry.lock +++ b/airbyte-integrations/connectors/source-chargebee/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "airbyte-cdk" -version = "0.58.1" +version = "0.67.1" description = "A framework for writing Airbyte Connectors." 
optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-cdk-0.58.1.tar.gz", hash = "sha256:0725c63184c37c2caf89faa2c9972e759d73877d03715b9e3eb56a132a6764a8"}, - {file = "airbyte_cdk-0.58.1-py3-none-any.whl", hash = "sha256:605299228e8838cbe6ea39c6d89c38c9674f3997e7b9b77f1dfb7577d84e0874"}, + {file = "airbyte-cdk-0.67.1.tar.gz", hash = "sha256:3f82be93ae6f574c70d7ad5352d34f9235e86bd74c0db14a0aa7d246f3a403c2"}, + {file = "airbyte_cdk-0.67.1-py3-none-any.whl", hash = "sha256:b1de0f004441a2ae6e2928e55f7ac31bd160af30e928ffda90eb75b5e3c56bf3"}, ] [package.dependencies] @@ -104,13 +104,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -301,6 +301,20 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "freezegun" +version = "1.4.0" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.7" +files = [ + {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + [[package]] name = "genson" version = "1.2.2" @@ -702,13 +716,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.tar.gz", hash = "sha256:78e73e19c63f5b20ffa567001531680d939dc042bf7850431877645523c66709"}, + {file = "python_dateutil-2.9.0-py2.py3-none-any.whl", hash = "sha256:cbf2f1da5e6083ac2fbfd4da39a25f34312230110440f424a14c7558bb85d82e"}, ] [package.dependencies] @@ -808,13 +822,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "1.1.1" +version = "1.2.0" description = "A persistent cache for python requests" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8" files = [ - {file = "requests_cache-1.1.1-py3-none-any.whl", hash = "sha256:c8420cf096f3aafde13c374979c21844752e2694ffd8710e6764685bb577ac90"}, - {file = "requests_cache-1.1.1.tar.gz", hash = "sha256:764f93d3fa860be72125a568c2cc8eafb151cf29b4dc2515433a56ee657e1c60"}, + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, ] 
[package.dependencies] @@ -826,15 +840,15 @@ url-normalize = ">=1.4" urllib3 = ">=1.25.5" [package.extras] -all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=5.4)", "redis (>=3)", "ujson (>=5.4)"] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] bson = ["bson (>=0.5)"] -docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.6)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] json = ["ujson (>=5.4)"] mongodb = ["pymongo (>=3)"] redis = ["redis (>=3)"] security = ["itsdangerous (>=2.0)"] -yaml = ["pyyaml (>=5.4)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" @@ -857,19 +871,19 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes [[package]] name = "setuptools" -version = "69.1.0" +version = "69.1.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, + {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", 
"pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -895,13 +909,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -920,13 +934,13 @@ six = "*" [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1031,4 +1045,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "71e2453d758f0222900531815381ff9c55f1d1a6a68f4c64b00c16de6727c8da" +content-hash = "3debef9fd80689da08f9af15d771faf278a8dc88bf02c3c617057da6189cd38b" diff --git a/airbyte-integrations/connectors/source-chargebee/pyproject.toml b/airbyte-integrations/connectors/source-chargebee/pyproject.toml index a3926727b9ff4..b97f55b2808f5 100644 --- a/airbyte-integrations/connectors/source-chargebee/pyproject.toml +++ b/airbyte-integrations/connectors/source-chargebee/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.4.0" +version = "0.4.1" name = "source-chargebee" description = "Source implementation for Chargebee." 
authors = [ "Airbyte ",] @@ -17,7 +17,8 @@ include = "source_chargebee" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "==0.58.1" +airbyte-cdk = "^0.67.1" +freezegun = "^1.4.0" [tool.poetry.scripts] source-chargebee = "source_chargebee.run:run" diff --git a/airbyte-integrations/connectors/source-chargebee/source_chargebee/manifest.yaml b/airbyte-integrations/connectors/source-chargebee/source_chargebee/manifest.yaml index 9b4fd4f7f2790..bbc097629c145 100644 --- a/airbyte-integrations/connectors/source-chargebee/source_chargebee/manifest.yaml +++ b/airbyte-integrations/connectors/source-chargebee/source_chargebee/manifest.yaml @@ -168,8 +168,6 @@ definitions: requester: $ref: "#/definitions/base_incremental_stream/retriever/requester" request_parameters: - sort_by[asc]: created_at - include_deleted: "true" updated_at[between]: "'[{{stream_slice['start_time']}}, {{stream_slice['end_time']}}]'" $parameters: name: "coupon" @@ -198,8 +196,7 @@ definitions: requester: $ref: "#/definitions/base_incremental_stream/retriever/requester" request_parameters: - sort_by[asc]: "occurred_at" - include_deleted: "true" + sort_by[asc]: occurred_at occurred_at[between]: "'[{{stream_slice['start_time']}}, {{stream_slice['end_time']}}]'" $parameters: name: "event" @@ -270,7 +267,6 @@ definitions: $ref: "#/definitions/base_incremental_stream/retriever/requester" request_parameters: sort_by[asc]: created_at - include_deleted: "true" created_at[between]: "'[{{stream_slice['start_time']}}, {{stream_slice['end_time']}}]'" $parameters: name: "promotional_credit" @@ -363,7 +359,7 @@ definitions: record_selector: $ref: "#/definitions/nested_selector" record_filter: - condition: "{{ record['updated_at'] >= ( stream_state.get('prior_state', {}).get('updated_at', 0) if stream_state else stream_slice.get('prior_state', {}).get('updated_at', 0) ) }}" + condition: "{{ record['migrated_at'] >= ( stream_state.get('prior_state', {}).get('migrated_at', 0) if stream_state else stream_slice.get('prior_state', {}).get('migrated_at', 0) ) }}" $parameters: name: "site_migration_detail" primary_key: "entity_id" @@ -377,7 +373,6 @@ definitions: $ref: "#/definitions/retriever/requester" request_parameters: sort_by[asc]: created_at - include_deleted: "true" created_at[between]: "'[{{stream_slice['start_time']}}, {{stream_slice['end_time']}}]'" incremental_sync: $ref: "#/definitions/date_stream_slicer" diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/__init__.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/config.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/config.py new file mode 100644 index 0000000000000..85f0de928865a --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/config.py @@ -0,0 +1,33 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+ +from datetime import datetime +from typing import Any, Dict + + +class ConfigBuilder: + def __init__(self) -> None: + self._config: Dict[str, Any] = { + "site": "ConfigBuilder default site", + "site_api_key": "ConfigBuilder default site api key", + "start_date": "2023-01-01T06:57:44Z", + "product_catalog": "2.0" + } + + def with_site(self, site: str) -> "ConfigBuilder": + self._config["site"] = site + return self + + def with_site_api_key(self, site_api_key: str) -> "ConfigBuilder": + self._config["site_api_key"] = site_api_key + return self + + def with_start_date(self, start_datetime: datetime) -> "ConfigBuilder": + self._config["start_date"] = start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ") + return self + + def with_product_catalog(self, product_catalog: str) -> "ConfigBuilder": + self._config["product_catalog"] = product_catalog or "2.0" + return self + + def build(self) -> Dict[str, Any]: + return self._config \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/pagination.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/pagination.py new file mode 100644 index 0000000000000..0cf9d9d5a5bcd --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/pagination.py @@ -0,0 +1,11 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from typing import Any, Dict + +from airbyte_cdk.test.mock_http.response_builder import PaginationStrategy + + +class ChargebeePaginationStrategy(PaginationStrategy): + @staticmethod + def update(response: Dict[str, Any]) -> None: + response["next_offset"] = "[1707076198000,57873868]" \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/request_builder.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/request_builder.py new file mode 100644 index 0000000000000..f9b15d847be5f --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/request_builder.py @@ -0,0 +1,127 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+ +import base64 +from datetime import datetime +from typing import List, Optional, Union + +from airbyte_cdk.test.mock_http import HttpRequest +from airbyte_cdk.test.mock_http.request import ANY_QUERY_PARAMS + + +class ChargebeeRequestBuilder: + + @classmethod + def addon_endpoint(cls, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + return cls("addons", site, site_api_key) + + @classmethod + def plan_endpoint(cls, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + return cls("plans", site, site_api_key) + + @classmethod + def virtual_bank_account_endpoint(cls, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + return cls("virtual_bank_accounts", site, site_api_key) + + @classmethod + def event_endpoint(cls, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + return cls("events", site, site_api_key) + + @classmethod + def site_migration_detail_endpoint(cls, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + return cls("site_migration_details", site, site_api_key) + + @classmethod + def customer_endpoint(cls, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + return cls("customers", site, site_api_key) + + @classmethod + def coupon_endpoint(cls, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + return cls("coupons", site, site_api_key) + + @classmethod + def subscription_endpoint(cls, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + return cls("subscriptions", site, site_api_key) + + @classmethod + def hosted_page_endpoint(cls, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + return cls("hosted_pages", site, site_api_key) + + def __init__(self, resource: str, site: str, site_api_key: str) -> "ChargebeeRequestBuilder": + self._resource: str = resource + self._site: str = site + self._site_api_key: str = site_api_key + self._any_query_params: bool = False + self._include_deleted: Optional[str] = None + self._created_at_btw: Optional[str] = None + self._updated_at_btw: Optional[str] = None + self._occurred_at_btw: Optional[str] = None + self._sort_by_asc: Optional[str] = None + self._sort_by_desc: Optional[str] = None + self._offset: Optional[str] = None + self._limit: Optional[str] = None + + def with_any_query_params(self) -> "ChargebeeRequestBuilder": + self._any_query_params = True + return self + + def with_include_deleted(self, include_deleted: bool) -> "ChargebeeRequestBuilder": + self._include_deleted = str(include_deleted).lower() + return self + + def with_created_at_btw(self, created_at_btw: List[int]) -> "ChargebeeRequestBuilder": + self._created_at_btw = f'{created_at_btw}' + return self + + def with_updated_at_btw(self, updated_at_btw: List[int]) -> "ChargebeeRequestBuilder": + self._updated_at_btw = f"{updated_at_btw}" + return self + + def with_occurred_at_btw(self, occurred_at_btw: List[int]) -> "ChargebeeRequestBuilder": + self._occurred_at_btw = f"{occurred_at_btw}" + return self + + def with_sort_by_asc(self, sort_by_asc: str) -> "ChargebeeRequestBuilder": + self._sort_by_asc = sort_by_asc + return self + + def with_sort_by_desc(self, sort_by_desc: str) -> "ChargebeeRequestBuilder": + self._sort_by_desc = sort_by_desc + return self + + def with_offset(self, offset: str) -> "ChargebeeRequestBuilder": + self._offset = offset + return self + + def with_limit(self, limit: int) -> "ChargebeeRequestBuilder": + self._limit = limit + return self + + def build(self) -> HttpRequest: + query_params= {} + if self._sort_by_asc: + query_params["sort_by[asc]"] = self._sort_by_asc + if 
self._sort_by_desc: + query_params["sort_by[desc]"] = self._sort_by_desc + if self._include_deleted: + query_params["include_deleted"] = self._include_deleted + if self._created_at_btw: + query_params["created_at[between]"] = self._created_at_btw + if self._updated_at_btw: + query_params["updated_at[between]"] = self._updated_at_btw + if self._occurred_at_btw: + query_params["occurred_at[between]"] = self._occurred_at_btw + if self._offset: + query_params["offset"] = self._offset + if self._limit: + query_params["limit"] = self._limit + + if self._any_query_params: + if query_params: + raise ValueError(f"Both `any_query_params` and {list(query_params.keys())} were configured. Provide only one or none, but not both.") + query_params = ANY_QUERY_PARAMS + + return HttpRequest( + url=f"https://{self._site}.chargebee.com/api/v2/{self._resource}", + query_params=query_params, + headers={"Authorization": f"Basic {base64.b64encode((str(self._site_api_key) + ':').encode('utf-8')).decode('utf-8')}"}, + ) \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/response_builder.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/response_builder.py new file mode 100644 index 0000000000000..f9163b6be3a87 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/response_builder.py @@ -0,0 +1,14 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +import json +from typing import Mapping + +from airbyte_cdk.test.mock_http import HttpResponse +from airbyte_cdk.test.mock_http.response_builder import find_template + + +def a_response_with_status(status_code: int) -> HttpResponse: + return HttpResponse(json.dumps(find_template(str(status_code), __file__)), status_code) + +def a_response_with_status_and_header(status_code: int, header: Mapping[str, str]) -> HttpResponse: + return HttpResponse(json.dumps(find_template(str(status_code), __file__)), status_code, header) \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_addon.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_addon.py new file mode 100644 index 0000000000000..35e1257ab86fb --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_addon.py @@ -0,0 +1,201 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+ +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from source_chargebee import SourceChargebee + +from .config import ConfigBuilder +from .pagination import ChargebeePaginationStrategy +from .request_builder import ChargebeeRequestBuilder +from .response_builder import a_response_with_status, a_response_with_status_and_header + +_STREAM_NAME = "addon" +_SITE = "test-site" +_SITE_API_KEY = "test-api-key" +_PRODUCT_CATALOG = "1.0" +_PRIMARY_KEY = "id" +_CURSOR_FIELD = "updated_at" +_NO_STATE = {} +_NOW = datetime.now(timezone.utc) + +def _a_request() -> ChargebeeRequestBuilder: + return ChargebeeRequestBuilder.addon_endpoint(_SITE, _SITE_API_KEY) + +def _config() -> ConfigBuilder: + return ConfigBuilder().with_site(_SITE).with_site_api_key(_SITE_API_KEY).with_product_catalog(_PRODUCT_CATALOG) + +def _catalog(sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(_STREAM_NAME, sync_mode).build() + +def _source() -> SourceChargebee: + return SourceChargebee() + +def _a_record() -> RecordBuilder: + return create_record_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + record_id_path=NestedPath([_STREAM_NAME, _PRIMARY_KEY]), + record_cursor_path=NestedPath([_STREAM_NAME, _CURSOR_FIELD]) + ) + +def _a_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + pagination_strategy=ChargebeePaginationStrategy() + ) + +def _read( + config_builder: ConfigBuilder, + sync_mode: SyncMode, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + catalog = _catalog(sync_mode) + config = config_builder.build() + return read(_source(), config, catalog, state, expecting_exception) + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=28) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.full_refresh, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_valid_response_records_are_extracted_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests simple read and record extraction + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record()).with_record(_a_record()).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 2 + + @HttpMocker() + def test_given_multiple_pages_of_records_read_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests pagination + http_mocker.get( + 
_a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).build(), + _a_response().with_record(_a_record()).with_pagination().build() + ) + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).with_offset("[1707076198000,57873868]").build(), + _a_response().with_record(_a_record()).build() + ) + + self._read(_config().with_start_date(self._start_date)) + # HTTPMocker ensures call are performed + + @HttpMocker() + def test_given_http_status_400_when_read_then_stream_is_ignored(self, http_mocker: HttpMocker) -> None: + # Tests 400 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(400) + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert len(output.get_stream_statuses(f"{_STREAM_NAME}s")) == 0 + + + @HttpMocker() + def test_given_http_status_401_when_the_stream_is_incomplete(self, http_mocker: HttpMocker) -> None: + # Test 401 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(401), + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + @HttpMocker() + def test_given_rate_limited_when_read_then_retry_and_return_records(self, http_mocker: HttpMocker) -> None: + # Tests backoff/retry with rate limiting + http_mocker.get( + _a_request().with_any_query_params().build(), + [ + a_response_with_status_and_header(429, {"Retry-After": "0.01"}), + _a_response().with_record(_a_record()).build(), + ], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_then_retry_returns_200_and_extracted_records(self, http_mocker: HttpMocker) -> None: + # Tests retry with 500 status + http_mocker.get( + _a_request().with_any_query_params().build(), + [a_response_with_status_and_header(500, {"Retry-After": "0.01"}), _a_response().with_record(_a_record()).build()], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_after_max_retries_raises_config_error(self, http_mocker: HttpMocker) -> None: + # Tests 500 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status_and_header(500, {"Retry-After": "0.01"}), + ) + output = self._read(_config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + +@freezegun.freeze_time(_NOW.isoformat()) +class IncrementalTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=60) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, state: Dict[str, Any], expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.incremental, state, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_no_initial_state_when_read_then_return_state_based_on_most_recently_read_slice(self, http_mocker: HttpMocker) -> None: + # Tests setting state when no initial state is provided + 
cursor_value = self._start_date_in_seconds + 1 + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(cursor_value)).build() + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), _NO_STATE) + assert output.most_recent_state == { _STREAM_NAME: {_CURSOR_FIELD: str(self._now_in_seconds) }} + + @HttpMocker() + def test_given_initial_state_use_state_for_query_params(self, http_mocker: HttpMocker) -> None: + # Tests updating query param with state + state_cursor_value = int((self._now - timedelta(days=5)).timestamp()) + state = StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_cursor_value}).build() + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([state_cursor_value, self._now_in_seconds]).build(), + _a_response().with_record(_a_record().with_cursor(self._now_in_seconds - 1)).build(), + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), state) + assert output.most_recent_state == { _STREAM_NAME: {_CURSOR_FIELD: str(self._now_in_seconds) }} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_coupon.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_coupon.py new file mode 100644 index 0000000000000..5709757d4584d --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_coupon.py @@ -0,0 +1,212 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from source_chargebee import SourceChargebee + +from .config import ConfigBuilder +from .pagination import ChargebeePaginationStrategy +from .request_builder import ChargebeeRequestBuilder +from .response_builder import a_response_with_status, a_response_with_status_and_header + +_STREAM_NAME = "coupon" +_SITE = "test-site" +_SITE_API_KEY = "test-api-key" +_PRODUCT_CATALOG = "2.0" +_PRIMARY_KEY = "id" +_CURSOR_FIELD = "updated_at" +_NO_STATE = {} +_NOW = datetime.now(timezone.utc) + +def _a_request() -> ChargebeeRequestBuilder: + return ChargebeeRequestBuilder.coupon_endpoint(_SITE, _SITE_API_KEY) + +def _config() -> ConfigBuilder: + return ConfigBuilder().with_site(_SITE).with_site_api_key(_SITE_API_KEY).with_product_catalog(_PRODUCT_CATALOG) + +def _catalog(sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(_STREAM_NAME, sync_mode).build() + +def _source() -> SourceChargebee: + return SourceChargebee() + +def _a_record() -> RecordBuilder: + return create_record_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + record_id_path=NestedPath([_STREAM_NAME, _PRIMARY_KEY]), + record_cursor_path=NestedPath([_STREAM_NAME, _CURSOR_FIELD]) + ) + +def _a_response() -> 
HttpResponseBuilder: + return create_response_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + pagination_strategy=ChargebeePaginationStrategy() + ) + +def _read( + config_builder: ConfigBuilder, + sync_mode: SyncMode, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + catalog = _catalog(sync_mode) + config = config_builder.build() + return read(_source(), config, catalog, state, expecting_exception) + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=28) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.full_refresh, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_valid_response_records_are_extracted_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests simple read and record extraction + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record()).with_record(_a_record()).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 2 + + @HttpMocker() + def test_given_multiple_pages_of_records_read_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests pagination + http_mocker.get( + _a_request().with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).build(), + _a_response().with_record(_a_record()).with_pagination().build() + ) + http_mocker.get( + _a_request().with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).with_offset("[1707076198000,57873868]").build(), + _a_response().with_record(_a_record()).build() + ) + + self._read(_config().with_start_date(self._start_date)) + # HTTPMocker ensures call are performed + + @HttpMocker() + def test_given_records_returned_with_custom_field_transformation(self, http_mocker: HttpMocker) -> None: + # Tests custom field transformation + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_field(NestedPath([_STREAM_NAME, "cf_my_custom_field"]), "my_custom_value")).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert output.records[0].record.data["custom_fields"][0]["name"] == "cf_my_custom_field" + assert output.records[0].record.data["custom_fields"][0]["value"] == "my_custom_value" + + @HttpMocker() + def test_given_http_status_400_when_read_then_stream_is_ignored(self, http_mocker: HttpMocker) -> None: + # Tests 400 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(400) + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert len(output.get_stream_statuses(f"{_STREAM_NAME}s")) == 0 + + @HttpMocker() + def test_given_http_status_401_when_the_stream_is_incomplete(self, http_mocker: HttpMocker) -> None: + # Test 401 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(401), + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + @HttpMocker() + def 
test_given_rate_limited_when_read_then_retry_and_return_records(self, http_mocker: HttpMocker) -> None: + # Tests backoff/retry with rate limiting + http_mocker.get( + _a_request().with_any_query_params().build(), + [ + a_response_with_status_and_header(429, {"Retry-After": "0.01"}), + _a_response().with_record(_a_record()).build(), + ], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_then_retry_returns_200_and_extracted_records(self, http_mocker: HttpMocker) -> None: + # Tests retry with 500 status + http_mocker.get( + _a_request().with_any_query_params().build(), + [a_response_with_status_and_header(500, {"Retry-After": "0.01"}), _a_response().with_record(_a_record()).build()], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_after_max_retries_raises_config_error(self, http_mocker: HttpMocker) -> None: + # Tests 500 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status_and_header(500, {"Retry-After": "0.01"}), + ) + output = self._read(_config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + +@freezegun.freeze_time(_NOW.isoformat()) +class IncrementalTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=60) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, state: Dict[str, Any], expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.incremental, state, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_no_initial_state_when_read_then_return_state_based_on_most_recently_read_slice(self, http_mocker: HttpMocker) -> None: + # Tests setting state when no initial state is provided + cursor_value = self._start_date_in_seconds + 1 + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(cursor_value)).build() + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), _NO_STATE) + assert output.most_recent_state == { _STREAM_NAME: {_CURSOR_FIELD: str(self._now_in_seconds) }} + + @HttpMocker() + def test_given_initial_state_use_state_for_query_params(self, http_mocker: HttpMocker) -> None: + # Tests updating query param with state + state_cursor_value = int((self._now - timedelta(days=5)).timestamp()) + state = StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_cursor_value}).build() + http_mocker.get( + _a_request().with_updated_at_btw([state_cursor_value, self._now_in_seconds]).build(), + _a_response().with_record(_a_record().with_cursor(self._now_in_seconds - 1)).build(), + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), state) + assert output.most_recent_state == { _STREAM_NAME: {_CURSOR_FIELD: str(self._now_in_seconds) }} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_customer.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_customer.py new file mode 100644 index 0000000000000..7f3c7621b19b9 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_customer.py @@ 
-0,0 +1,211 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from source_chargebee import SourceChargebee + +from .config import ConfigBuilder +from .pagination import ChargebeePaginationStrategy +from .request_builder import ChargebeeRequestBuilder +from .response_builder import a_response_with_status, a_response_with_status_and_header + +_STREAM_NAME = "customer" +_SITE = "test-site" +_SITE_API_KEY = "test-api-key" +_PRODUCT_CATALOG = "2.0" +_PRIMARY_KEY = "id" +_CURSOR_FIELD = "updated_at" +_NO_STATE = {} +_NOW = datetime.now(timezone.utc) + +def _a_request() -> ChargebeeRequestBuilder: + return ChargebeeRequestBuilder.customer_endpoint(_SITE, _SITE_API_KEY) + +def _config() -> ConfigBuilder: + return ConfigBuilder().with_site(_SITE).with_site_api_key(_SITE_API_KEY).with_product_catalog(_PRODUCT_CATALOG) + +def _catalog(sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(_STREAM_NAME, sync_mode).build() + +def _source() -> SourceChargebee: + return SourceChargebee() + +def _a_record() -> RecordBuilder: + return create_record_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + record_id_path=NestedPath([_STREAM_NAME, _PRIMARY_KEY]), + record_cursor_path=NestedPath([_STREAM_NAME, _CURSOR_FIELD]) + ) + +def _a_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + pagination_strategy=ChargebeePaginationStrategy() + ) + +def _read( + config_builder: ConfigBuilder, + sync_mode: SyncMode, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + catalog = _catalog(sync_mode) + config = config_builder.build() + return read(_source(), config, catalog, state, expecting_exception) + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=28) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.full_refresh, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_valid_response_records_are_extracted_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests simple read and record extraction + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record()).with_record(_a_record()).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 2 + + @HttpMocker() + def test_given_multiple_pages_of_records_read_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests pagination + http_mocker.get( + 
_a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).build(), + _a_response().with_record(_a_record()).with_pagination().build() + ) + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).with_offset("[1707076198000,57873868]").build(), + _a_response().with_record(_a_record()).build() + ) + + self._read(_config().with_start_date(self._start_date)) + # HTTPMocker ensures call are performed + + @HttpMocker() + def test_given_records_returned_with_custom_field_transformation(self, http_mocker: HttpMocker) -> None: + # Tests custom field transformation + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_field(NestedPath([_STREAM_NAME, "cf_my_custom_field"]), "my_custom_value")).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert output.records[0].record.data["custom_fields"][0]["name"] == "cf_my_custom_field" + assert output.records[0].record.data["custom_fields"][0]["value"] == "my_custom_value" + + @HttpMocker() + def test_given_http_status_400_when_read_then_stream_is_ignored(self, http_mocker: HttpMocker) -> None: + # Tests 400 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(400) + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert len(output.get_stream_statuses(f"{_STREAM_NAME}s")) == 0 + + @HttpMocker() + def test_given_http_status_401_when_the_stream_is_incomplete(self, http_mocker: HttpMocker) -> None: + # Test 401 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(401), + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + @HttpMocker() + def test_given_rate_limited_when_read_then_retry_and_return_records(self, http_mocker: HttpMocker) -> None: + # Tests backoff/retry with rate limiting + http_mocker.get( + _a_request().with_any_query_params().build(), + [ + a_response_with_status_and_header(429, {"Retry-After": "0.01"}), + _a_response().with_record(_a_record()).build(), + ], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_then_retry_returns_200_and_extracted_records(self, http_mocker: HttpMocker) -> None: + # Tests retry with 500 status + http_mocker.get( + _a_request().with_any_query_params().build(), + [a_response_with_status_and_header(500, {"Retry-After": "0.01"}), _a_response().with_record(_a_record()).build()], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_after_max_retries_raises_config_error(self, http_mocker: HttpMocker) -> None: + # Tests 500 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status_and_header(500, {"Retry-After": "0.01"}), + ) + output = self._read(_config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + +@freezegun.freeze_time(_NOW.isoformat()) +class IncrementalTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + 
self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=60) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, state: Dict[str, Any], expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.incremental, state, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_no_initial_state_when_read_then_return_state_based_on_most_recently_read_slice(self, http_mocker: HttpMocker) -> None: + # Tests setting state when no initial state is provided + cursor_value = self._start_date_in_seconds + 1 + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(cursor_value)).build() + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), _NO_STATE) + assert output.most_recent_state == { _STREAM_NAME: {_CURSOR_FIELD: str(self._now_in_seconds) }} + + @HttpMocker() + def test_given_initial_state_use_state_for_query_params(self, http_mocker: HttpMocker) -> None: + # Tests updating query param with state + state_cursor_value = int((self._now - timedelta(days=5)).timestamp()) + state = StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_cursor_value}).build() + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([state_cursor_value, self._now_in_seconds]).build(), + _a_response().with_record(_a_record().with_cursor(self._now_in_seconds - 1)).build(), + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), state) + assert output.most_recent_state == { _STREAM_NAME: {_CURSOR_FIELD: str(self._now_in_seconds) }} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_event.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_event.py new file mode 100644 index 0000000000000..0c7794e01b300 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_event.py @@ -0,0 +1,197 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
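# --- Illustrative sketch (editorial note, not part of this patch) -----------------
# The pagination tests above expect a follow-up request carrying the offset token
# "[1707076198000,57873868]". That token is advertised by the mocked first page via
# .with_pagination(), which delegates to the ChargebeePaginationStrategy imported from
# .pagination. A minimal strategy of that shape is sketched below; the response field
# name "next_offset" and the token constant are assumptions inferred from these tests
# and the Chargebee list API, not a verbatim copy of the PR's pagination.py.
from typing import Any, Dict

from airbyte_cdk.test.mock_http.response_builder import PaginationStrategy


class ChargebeePaginationStrategySketch(PaginationStrategy):
    # Token matching the offset the second mocked request is expected to receive.
    NEXT_PAGE_TOKEN = "[1707076198000,57873868]"

    def update(self, response: Dict[str, Any]) -> None:
        # Chargebee list endpoints signal another page through a top-level "next_offset".
        response["next_offset"] = self.NEXT_PAGE_TOKEN
# -----------------------------------------------------------------------------------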
+ +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from source_chargebee import SourceChargebee + +from .config import ConfigBuilder +from .pagination import ChargebeePaginationStrategy +from .request_builder import ChargebeeRequestBuilder +from .response_builder import a_response_with_status, a_response_with_status_and_header + +_STREAM_NAME = "event" +_SITE = "test-site" +_SITE_API_KEY = "test-api-key" +_PRODUCT_CATALOG = "2.0" +_PRIMARY_KEY = "id" +_CURSOR_FIELD = "occurred_at" +_NO_STATE = {} +_NOW = datetime.now(timezone.utc) + +def _a_request() -> ChargebeeRequestBuilder: + return ChargebeeRequestBuilder.event_endpoint(_SITE, _SITE_API_KEY) + +def _config() -> ConfigBuilder: + return ConfigBuilder().with_site(_SITE).with_site_api_key(_SITE_API_KEY).with_product_catalog(_PRODUCT_CATALOG) + +def _catalog(sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(_STREAM_NAME, sync_mode).build() + +def _source() -> SourceChargebee: + return SourceChargebee() + +def _a_record() -> RecordBuilder: + return create_record_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + record_id_path=NestedPath([_STREAM_NAME, _PRIMARY_KEY]), + record_cursor_path=NestedPath([_STREAM_NAME, _CURSOR_FIELD]) + ) + +def _a_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + pagination_strategy=ChargebeePaginationStrategy() + ) + +def _read( + config_builder: ConfigBuilder, + sync_mode: SyncMode, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + catalog = _catalog(sync_mode) + config = config_builder.build() + return read(_source(), config, catalog, state, expecting_exception) + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=28) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.full_refresh, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_valid_response_records_are_extracted_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests simple read and record extraction + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record()).with_record(_a_record()).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 2 + + @HttpMocker() + def test_given_multiple_pages_of_records_read_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests pagination + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_occurred_at_btw([self._start_date_in_seconds, 
self._now_in_seconds]).build(), + _a_response().with_record(_a_record()).with_pagination().build() + ) + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_occurred_at_btw([self._start_date_in_seconds, self._now_in_seconds]).with_offset("[1707076198000,57873868]").build(), + _a_response().with_record(_a_record()).build() + ) + + self._read(_config().with_start_date(self._start_date)) + # HTTPMocker ensures call are performed + + @HttpMocker() + def test_given_http_status_400_when_read_then_stream_is_ignored(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(400), + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.get_stream_statuses(f"{_STREAM_NAME}s")) == 0 + + @HttpMocker() + def test_given_http_status_401_when_the_stream_is_incomplete(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(401), + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + @HttpMocker() + def test_given_rate_limited_when_read_then_retry_and_return_records(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + _a_request().with_any_query_params().build(), + [ + a_response_with_status_and_header(429, {"Retry-After": "0.01"}), + _a_response().with_record(_a_record()).build(), + ], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_then_retry_returns_200_and_extracted_records(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + _a_request().with_any_query_params().build(), + [a_response_with_status_and_header(500, {"Retry-After": "0.01"}), _a_response().with_record(_a_record()).build()], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_after_max_retries_raises_config_error(self, http_mocker: HttpMocker) -> None: + # Tests 500 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status_and_header(500, {"Retry-After": "0.01"}), + ) + output = self._read(_config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + +@freezegun.freeze_time(_NOW.isoformat()) +class IncrementalTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=60) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, state: Dict[str, Any], expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.incremental, state, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_no_initial_state_when_read_then_return_state_based_on_most_recently_read_slice(self, http_mocker: HttpMocker) -> None: + # Tests setting state when no initial state is provided + cursor_value = self._start_date_in_seconds + 1 + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(cursor_value)).build() + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), _NO_STATE) + assert output.most_recent_state 
== { _STREAM_NAME: {_CURSOR_FIELD: str(self._now_in_seconds) }} + + @HttpMocker() + def test_given_initial_state_use_state_for_query_params(self, http_mocker: HttpMocker) -> None: + # Tests updating query param with state + state_cursor_value = int((self._now - timedelta(days=5)).timestamp()) + state = StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_cursor_value}).build() + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_occurred_at_btw([state_cursor_value, self._now_in_seconds]).build(), + _a_response().with_record(_a_record().with_cursor(self._now_in_seconds - 1)).build(), + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), state) + assert output.most_recent_state == { _STREAM_NAME: {_CURSOR_FIELD: str(self._now_in_seconds) }} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_hosted_page.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_hosted_page.py new file mode 100644 index 0000000000000..7e98a32792f00 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_hosted_page.py @@ -0,0 +1,196 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from source_chargebee import SourceChargebee + +from .config import ConfigBuilder +from .pagination import ChargebeePaginationStrategy +from .request_builder import ChargebeeRequestBuilder +from .response_builder import a_response_with_status, a_response_with_status_and_header + +_STREAM_NAME = "hosted_page" +_SITE = "test-site" +_SITE_API_KEY = "test-api-key" +_PRODUCT_CATALOG = "2.0" +_PRIMARY_KEY = "id" +_CURSOR_FIELD = "updated_at" +_NO_STATE = {} +_NOW = datetime.now(timezone.utc) + +def _a_request() -> ChargebeeRequestBuilder: + return ChargebeeRequestBuilder.hosted_page_endpoint(_SITE, _SITE_API_KEY) + +def _config() -> ConfigBuilder: + return ConfigBuilder().with_site(_SITE).with_site_api_key(_SITE_API_KEY).with_product_catalog(_PRODUCT_CATALOG) + +def _catalog(sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(_STREAM_NAME, sync_mode).build() + +def _source() -> SourceChargebee: + return SourceChargebee() + +def _a_record() -> RecordBuilder: + return create_record_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + record_id_path=NestedPath([_STREAM_NAME, _PRIMARY_KEY]), + record_cursor_path=NestedPath([_STREAM_NAME, _CURSOR_FIELD]) + ) + +def _a_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + pagination_strategy=ChargebeePaginationStrategy() + ) + +def _read( + config_builder: ConfigBuilder, + sync_mode: SyncMode, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + 
catalog = _catalog(sync_mode) + config = config_builder.build() + return read(_source(), config, catalog, state, expecting_exception) + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=28) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.full_refresh, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_valid_response_records_are_extracted_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests simple read and record extraction + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record()).with_record(_a_record()).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 2 + + @HttpMocker() + def test_given_multiple_pages_of_records_read_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests pagination + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).build(), + _a_response().with_record(_a_record()).with_pagination().build() + ) + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).with_offset("[1707076198000,57873868]").build(), + _a_response().with_record(_a_record()).build() + ) + + self._read(_config().with_start_date(self._start_date)) + # HTTPMocker ensures call are performed + + @HttpMocker() + def test_given_http_status_400_when_read_then_stream_is_ignored(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(400), + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.get_stream_statuses(f"{_STREAM_NAME}s")) == 0 + + @HttpMocker() + def test_given_http_status_401_when_the_stream_is_incomplete(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(401), + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + @HttpMocker() + def test_given_rate_limited_when_read_then_retry_and_return_records(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + _a_request().with_any_query_params().build(), + [ + a_response_with_status_and_header(429, {"Retry-After": "0.01"}), + _a_response().with_record(_a_record()).build(), + ], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_then_retry_returns_200_and_extracted_records(self, http_mocker: HttpMocker) -> None: + http_mocker.get( + _a_request().with_any_query_params().build(), + [a_response_with_status_and_header(500, {"Retry-After": "0.01"}), _a_response().with_record(_a_record()).build()], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_after_max_retries_raises_config_error(self, http_mocker: HttpMocker) -> None: + # Tests 500 status error 
handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status_and_header(500, {"Retry-After": "0.01"}), + ) + output = self._read(_config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + +@freezegun.freeze_time(_NOW.isoformat()) +class IncrementalTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=60) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, state: Dict[str, Any], expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.incremental, state, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_no_initial_state_when_read_then_return_state_based_on_most_recently_read_slice(self, http_mocker: HttpMocker) -> None: + # Tests setting state when no initial state is provided + cursor_value = self._start_date_in_seconds + 1 + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(cursor_value)).build() + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), _NO_STATE) + assert output.most_recent_state == { _STREAM_NAME: {_CURSOR_FIELD: str(self._now_in_seconds) }} + + @HttpMocker() + def test_given_initial_state_use_state_for_query_params(self, http_mocker: HttpMocker) -> None: + # Tests updating query param with state + state_cursor_value = int((self._now - timedelta(days=5)).timestamp()) + state = StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_cursor_value}).build() + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([state_cursor_value, self._now_in_seconds]).build(), + _a_response().with_record(_a_record().with_cursor(self._now_in_seconds - 1)).build(), + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), state) + assert output.most_recent_state == { _STREAM_NAME: {_CURSOR_FIELD: str(self._now_in_seconds) }} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_plan.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_plan.py new file mode 100644 index 0000000000000..9cf6d3db4364a --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_plan.py @@ -0,0 +1,201 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
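# --- Illustrative sketch (editorial note, not part of this patch) -----------------
# Every test module in this patch builds its connector config through the ConfigBuilder
# imported from .config, whose body is not shown in these hunks. A plausible builder,
# inferred from the with_site / with_site_api_key / with_product_catalog / with_start_date
# calls used above, is sketched here; the key names and default values are assumptions.
from datetime import datetime
from typing import Any, Dict


class ConfigBuilderSketch:
    def __init__(self) -> None:
        self._config: Dict[str, Any] = {
            "site": "a-default-site",
            "site_api_key": "a-default-api-key",
            "start_date": "2023-01-01T00:00:00Z",
            "product_catalog": "2.0",
        }

    def with_site(self, site: str) -> "ConfigBuilderSketch":
        self._config["site"] = site
        return self

    def with_site_api_key(self, site_api_key: str) -> "ConfigBuilderSketch":
        self._config["site_api_key"] = site_api_key
        return self

    def with_product_catalog(self, product_catalog: str) -> "ConfigBuilderSketch":
        self._config["product_catalog"] = product_catalog
        return self

    def with_start_date(self, start_date: datetime) -> "ConfigBuilderSketch":
        # The tests pass timezone-aware datetimes; the connector config expects a string.
        self._config["start_date"] = start_date.strftime("%Y-%m-%dT%H:%M:%SZ")
        return self

    def build(self) -> Dict[str, Any]:
        return self._config
# -----------------------------------------------------------------------------------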
+ +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from source_chargebee import SourceChargebee + +from .config import ConfigBuilder +from .pagination import ChargebeePaginationStrategy +from .request_builder import ChargebeeRequestBuilder +from .response_builder import a_response_with_status, a_response_with_status_and_header + +_STREAM_NAME = "plan" +_SITE = "test-site" +_SITE_API_KEY = "test-api-key" +_PRODUCT_CATALOG = "1.0" +_PRIMARY_KEY = "id" +_CURSOR_FIELD = "updated_at" +_NO_STATE = {} +_NOW = datetime.now(timezone.utc) + +def _a_request() -> ChargebeeRequestBuilder: + return ChargebeeRequestBuilder.plan_endpoint(_SITE, _SITE_API_KEY) + +def _config() -> ConfigBuilder: + return ConfigBuilder().with_site(_SITE).with_site_api_key(_SITE_API_KEY).with_product_catalog(_PRODUCT_CATALOG) + +def _catalog(sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(_STREAM_NAME, sync_mode).build() + +def _source() -> SourceChargebee: + return SourceChargebee() + +def _a_record() -> RecordBuilder: + return create_record_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + record_id_path=NestedPath([_STREAM_NAME, _PRIMARY_KEY]), + record_cursor_path=NestedPath([_STREAM_NAME, _CURSOR_FIELD]) + ) + +def _a_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + pagination_strategy=ChargebeePaginationStrategy() + ) + +def _read( + config_builder: ConfigBuilder, + sync_mode: SyncMode, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + catalog = _catalog(sync_mode) + config = config_builder.build() + return read(_source(), config, catalog, state, expecting_exception) + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=28) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.full_refresh, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_valid_response_records_are_extracted_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests simple read and record extraction + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record()).with_record(_a_record()).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 2 + + @HttpMocker() + def test_given_multiple_pages_of_records_read_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests pagination + http_mocker.get( + 
_a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).build(), + _a_response().with_record(_a_record()).with_pagination().build() + ) + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).with_offset("[1707076198000,57873868]").build(), + _a_response().with_record(_a_record()).build() + ) + + self._read(_config().with_start_date(self._start_date)) + # HTTPMocker ensures call are performed + + @HttpMocker() + def test_given_http_status_400_when_read_then_stream_is_ignored(self, http_mocker: HttpMocker) -> None: + # Tests 400 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(400) + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert len(output.get_stream_statuses(f"{_STREAM_NAME}s")) == 0 + + @HttpMocker() + def test_given_http_status_401_when_the_stream_is_incomplete(self, http_mocker: HttpMocker) -> None: + # Test 401 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(401), + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + @HttpMocker() + def test_given_rate_limited_when_read_then_retry_and_return_records(self, http_mocker: HttpMocker) -> None: + # Tests backoff/retry with rate limiting + http_mocker.get( + _a_request().with_any_query_params().build(), + [ + a_response_with_status_and_header(429, {"Retry-After": "0.01"}), + _a_response().with_record(_a_record()).build(), + ], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_then_retry_returns_200_and_extracted_records(self, http_mocker: HttpMocker) -> None: + # Tests retry with 500 status + http_mocker.get( + _a_request().with_any_query_params().build(), + [a_response_with_status_and_header(500, {"Retry-After": "0.01"}), _a_response().with_record(_a_record()).build()], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_after_max_retries_raises_config_error(self, http_mocker: HttpMocker) -> None: + # Tests 500 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status_and_header(500, {"Retry-After": "0.01"}), + ) + output = self._read(_config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + +@freezegun.freeze_time(_NOW.isoformat()) +class IncrementalTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=60) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, state: Dict[str, Any], expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.incremental, state, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_no_initial_state_when_read_then_return_state_based_on_most_recently_read_slice(self, http_mocker: HttpMocker) -> None: + # Tests setting state when no initial state is provided + 
cursor_value = self._start_date_in_seconds + 1 + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(cursor_value)).build() + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), _NO_STATE) + assert output.most_recent_state == { _STREAM_NAME: {_CURSOR_FIELD: str(self._now_in_seconds) }} + + @HttpMocker() + def test_given_initial_state_use_state_for_query_params(self, http_mocker: HttpMocker) -> None: + # Tests updating query param with state + state_cursor_value = int((self._now - timedelta(days=5)).timestamp()) + state = StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_cursor_value}).build() + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([state_cursor_value, self._now_in_seconds]).build(), + _a_response().with_record(_a_record().with_cursor(self._now_in_seconds - 1)).build(), + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), state) + assert output.most_recent_state == { _STREAM_NAME: {_CURSOR_FIELD: str(self._now_in_seconds) }} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_site_migration_detail.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_site_migration_detail.py new file mode 100644 index 0000000000000..b481c79d40d8b --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_site_migration_detail.py @@ -0,0 +1,213 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from source_chargebee import SourceChargebee + +from .config import ConfigBuilder +from .pagination import ChargebeePaginationStrategy +from .request_builder import ChargebeeRequestBuilder +from .response_builder import a_response_with_status, a_response_with_status_and_header + +_STREAM_NAME = "site_migration_detail" +_SITE = "test-site" +_SITE_API_KEY = "test-api-key" +_PRODUCT_CATALOG = "2.0" +_PRIMARY_KEY = "entity_id" +_CURSOR_FIELD = "migrated_at" +_NO_STATE = {} +_NOW = datetime.now(timezone.utc) + +''' +Note that this is a semi-incremental stream and tests will need to be adapted accordingly +''' + + +def _a_request() -> ChargebeeRequestBuilder: + return ChargebeeRequestBuilder.site_migration_detail_endpoint(_SITE, _SITE_API_KEY) + +def _config() -> ConfigBuilder: + return ConfigBuilder().with_site(_SITE).with_site_api_key(_SITE_API_KEY).with_product_catalog(_PRODUCT_CATALOG) + +def _catalog(sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(_STREAM_NAME, sync_mode).build() + +def _source() -> SourceChargebee: + return SourceChargebee() + +def _a_record() -> RecordBuilder: + return create_record_builder( + 
find_template(_STREAM_NAME, __file__), + FieldPath("list"), + record_id_path=NestedPath([_STREAM_NAME, _PRIMARY_KEY]), + record_cursor_path=NestedPath([_STREAM_NAME, _CURSOR_FIELD]) + ) + +def _a_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + pagination_strategy=ChargebeePaginationStrategy() + ) + +def _read( + config_builder: ConfigBuilder, + sync_mode: SyncMode, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + catalog = _catalog(sync_mode) + config = config_builder.build() + return read(_source(), config, catalog, state, expecting_exception) + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=28) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.full_refresh, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_valid_response_records_are_extracted_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests simple read and record extraction + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record()).with_record(_a_record()).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 2 + + @HttpMocker() + def test_given_multiple_pages_of_records_read_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests pagination + http_mocker.get( + _a_request().build(), + _a_response().with_record(_a_record()).with_pagination().build() + ) + http_mocker.get( + _a_request().with_offset("[1707076198000,57873868]").build(), + _a_response().with_record(_a_record()).build() + ) + + self._read(_config().with_start_date(self._start_date)) + # HTTPMocker ensures call are performed + + + @HttpMocker() + def test_given_http_status_400_when_read_then_stream_is_ignored(self, http_mocker: HttpMocker) -> None: + # Tests 400 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(400) + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert len(output.get_stream_statuses(f"{_STREAM_NAME}s")) == 0 + + @HttpMocker() + def test_given_http_status_401_when_the_stream_is_incomplete(self, http_mocker: HttpMocker) -> None: + # Test 401 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(401), + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + @HttpMocker() + def test_given_rate_limited_when_read_then_retry_and_return_records(self, http_mocker: HttpMocker) -> None: + # Tests backoff/retry with rate limiting + http_mocker.get( + _a_request().with_any_query_params().build(), + [ + a_response_with_status_and_header(429, {"Retry-After": "0.01"}), + _a_response().with_record(_a_record()).build(), + ], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_then_retry_returns_200_and_extracted_records(self, http_mocker: HttpMocker) -> None: + # Tests 
retry with 500 status + http_mocker.get( + _a_request().with_any_query_params().build(), + [a_response_with_status_and_header(500, {"Retry-After": "0.01"}), _a_response().with_record(_a_record()).build()], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_after_max_retries_raises_config_error(self, http_mocker: HttpMocker) -> None: + # Tests 500 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status_and_header(500, {"Retry-After": "0.01"}), + ) + output = self._read(_config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + +@freezegun.freeze_time(_NOW.isoformat()) +class IncrementalTest(TestCase): + + # Site Migration Detail stream is a semi-incremental stream and therefore state acts differently than typical declarative incremental implementation -- state is updated to most recent cursor value read + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=60) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, state: Dict[str, Any], expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.incremental, state, expecting_exception) + + @HttpMocker() + def test_given_no_initial_state_when_read_then_return_state_based_on_cursor_field(self, http_mocker: HttpMocker) -> None: + # Tests setting state when no initial state is provided + cursor_value = self._start_date_in_seconds + 1 + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(cursor_value)).with_record(_a_record().with_cursor(cursor_value + 1)).build() + ) + output = self._read(_config().with_start_date(self._start_date), _NO_STATE) + assert output.most_recent_state[_STREAM_NAME][_CURSOR_FIELD] == cursor_value + 1 + assert len(output.state_messages) == 1 # one state message per read + assert len(output.records) == 2 # two records generated + + @HttpMocker() + def test_given_state_when_read_then_use_state_for_query_params(self, http_mocker: HttpMocker) -> None: + # Tests updating query param with state + state_cursor_value = int((self._start_date + timedelta(days=31)).timestamp()) + state = StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_cursor_value}).build() + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(self._now_in_seconds - 1)).with_record(_a_record().with_cursor(self._now_in_seconds - 2)).build(), + ) + output = self._read(_config().with_start_date(self._start_date), state) + assert len(output.state_messages) == 1 # one state message per read + assert len(output.records) == 2 # two records generated + assert output.most_recent_state[_STREAM_NAME][_CURSOR_FIELD] == self._now_in_seconds - 1 # should be cursor value of more recent record diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_subscription.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_subscription.py new file mode 100644 index 0000000000000..8b11a4410d5d4 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_subscription.py @@ -0,0 +1,212 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
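# --- Illustrative sketch (editorial note, not part of this patch) -----------------
# The site_migration_detail tests above describe the stream as "semi-incremental": the
# request is not filtered by the cursor, so filtering happens client-side and state
# advances to the most recent cursor value read. The helper below illustrates that idea
# only; the connector's real behavior is declared in manifest.yaml, and the ">=" cutoff
# is an assumption the tests above do not pin down.
from typing import Any, Dict, Iterable, List


def filter_semi_incrementally(
    records: Iterable[Dict[str, Any]],
    stream_state: Dict[str, int],
    cursor_field: str = "migrated_at",
) -> List[Dict[str, Any]]:
    low_water_mark = stream_state.get(cursor_field, 0)
    # Keep only records at or beyond the stored cursor...
    emitted = [record for record in records if record.get(cursor_field, 0) >= low_water_mark]
    # ...then advance state to the newest cursor seen, as IncrementalTest asserts.
    if emitted:
        stream_state[cursor_field] = max(record[cursor_field] for record in emitted)
    return emitted
# -----------------------------------------------------------------------------------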
+ +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from source_chargebee import SourceChargebee + +from .config import ConfigBuilder +from .pagination import ChargebeePaginationStrategy +from .request_builder import ChargebeeRequestBuilder +from .response_builder import a_response_with_status, a_response_with_status_and_header + +_STREAM_NAME = "subscription" +_SITE = "test-site" +_SITE_API_KEY = "test-api-key" +_PRODUCT_CATALOG = "2.0" +_PRIMARY_KEY = "id" +_CURSOR_FIELD = "updated_at" +_NO_STATE = {} +_NOW = datetime.now(timezone.utc) + +def _a_request() -> ChargebeeRequestBuilder: + return ChargebeeRequestBuilder.subscription_endpoint(_SITE, _SITE_API_KEY) + +def _config() -> ConfigBuilder: + return ConfigBuilder().with_site(_SITE).with_site_api_key(_SITE_API_KEY).with_product_catalog(_PRODUCT_CATALOG) + +def _catalog(sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(_STREAM_NAME, sync_mode).build() + +def _source() -> SourceChargebee: + return SourceChargebee() + +def _a_record() -> RecordBuilder: + return create_record_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + record_id_path=NestedPath([_STREAM_NAME, _PRIMARY_KEY]), + record_cursor_path=NestedPath([_STREAM_NAME, _CURSOR_FIELD]) + ) + +def _a_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + pagination_strategy=ChargebeePaginationStrategy() + ) + +def _read( + config_builder: ConfigBuilder, + sync_mode: SyncMode, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + catalog = _catalog(sync_mode) + config = config_builder.build() + return read(_source(), config, catalog, state, expecting_exception) + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=28) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.full_refresh, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_valid_response_records_are_extracted_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests simple read and record extraction + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record()).with_record(_a_record()).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 2 + + @HttpMocker() + def test_given_multiple_pages_of_records_read_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests pagination + http_mocker.get( + 
_a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).build(), + _a_response().with_record(_a_record()).with_pagination().build() + ) + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).with_offset("[1707076198000,57873868]").build(), + _a_response().with_record(_a_record()).build() + ) + + self._read(_config().with_start_date(self._start_date)) + # HTTPMocker ensures call are performed + + @HttpMocker() + def test_given_records_returned_with_custom_field_transformation(self, http_mocker: HttpMocker) -> None: + # Tests custom field transformation + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_field(NestedPath([_STREAM_NAME, "cf_my_custom_field"]), "my_custom_value")).build() + ) + output = self._read(_config().with_start_date(self._start_date)) + assert output.records[0].record.data["custom_fields"][0]["name"] == "cf_my_custom_field" + assert output.records[0].record.data["custom_fields"][0]["value"] == "my_custom_value" + + @HttpMocker() + def test_given_http_status_400_when_read_then_stream_is_ignored(self, http_mocker: HttpMocker) -> None: + # Tests 400 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(400) + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert len(output.get_stream_statuses(f"{_STREAM_NAME}s")) == 0 + + @HttpMocker() + def test_given_http_status_401_when_the_stream_is_incomplete(self, http_mocker: HttpMocker) -> None: + # Test 401 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(401), + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + @HttpMocker() + def test_given_rate_limited_when_read_then_retry_and_return_records(self, http_mocker: HttpMocker) -> None: + # Tests backoff/retry with rate limiting + http_mocker.get( + _a_request().with_any_query_params().build(), + [ + a_response_with_status_and_header(429, {"Retry-After": "0.01"}), + _a_response().with_record(_a_record()).build(), + ], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_then_retry_returns_200_and_extracted_records(self, http_mocker: HttpMocker) -> None: + # Tests retry with 500 status + http_mocker.get( + _a_request().with_any_query_params().build(), + [a_response_with_status_and_header(500, {"Retry-After": "0.01"}), _a_response().with_record(_a_record()).build()], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_after_max_retries_raises_config_error(self, http_mocker: HttpMocker) -> None: + # Tests 500 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status_and_header(500, {"Retry-After": "0.01"}), + ) + output = self._read(_config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + +@freezegun.freeze_time(_NOW.isoformat()) +class IncrementalTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + 
self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=60) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, state: Dict[str, Any], expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.incremental, state, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_no_initial_state_when_read_then_return_state_based_on_most_recently_read_slice(self, http_mocker: HttpMocker) -> None: + # Tests setting state when no initial state is provided + cursor_value = self._start_date_in_seconds + 1 + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(cursor_value)).build() + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), _NO_STATE) + assert output.most_recent_state == { _STREAM_NAME: {_CURSOR_FIELD: str(self._now_in_seconds) }} + + @HttpMocker() + def test_given_initial_state_use_state_for_query_params(self, http_mocker: HttpMocker) -> None: + # Tests updating query param with state + state_cursor_value = int((self._now - timedelta(days=5)).timestamp()) + state = StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_cursor_value}).build() + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([state_cursor_value, self._now_in_seconds]).build(), + _a_response().with_record(_a_record().with_cursor(self._now_in_seconds - 1)).build(), + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), state) + assert output.most_recent_state == { _STREAM_NAME: {_CURSOR_FIELD: str(self._now_in_seconds) }} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_virtual_bank_account.py b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_virtual_bank_account.py new file mode 100644 index 0000000000000..86deac19fafec --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/integration/test_virtual_bank_account.py @@ -0,0 +1,201 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
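# --- Illustrative sketch (editorial note, not part of this patch) -----------------
# The error-handling tests above use a_response_with_status and
# a_response_with_status_and_header from .response_builder, whose body is not shown in
# these hunks. A plausible implementation, assuming the helpers serve the canned payloads
# added under unit_tests/resource/http/response/<status>.json, is sketched below; the
# exact signatures in the PR's response_builder.py may differ.
import json
from typing import Mapping

from airbyte_cdk.test.mock_http import HttpResponse
from airbyte_cdk.test.mock_http.response_builder import find_template


def a_response_with_status_sketch(status_code: int) -> HttpResponse:
    # find_template("400", __file__) would load resource/http/response/400.json.
    return HttpResponse(json.dumps(find_template(str(status_code), __file__)), status_code)


def a_response_with_status_and_header_sketch(status_code: int, header: Mapping[str, str]) -> HttpResponse:
    # Same payload plus headers such as {"Retry-After": "0.01"} used by the retry tests.
    return HttpResponse(json.dumps(find_template(str(status_code), __file__)), status_code, header)
# -----------------------------------------------------------------------------------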
+ +from datetime import datetime, timedelta, timezone +from typing import Any, Dict, Optional +from unittest import TestCase + +import freezegun +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + NestedPath, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_cdk.test.state_builder import StateBuilder +from airbyte_protocol.models import ConfiguredAirbyteCatalog, FailureType, SyncMode +from source_chargebee import SourceChargebee + +from .config import ConfigBuilder +from .pagination import ChargebeePaginationStrategy +from .request_builder import ChargebeeRequestBuilder +from .response_builder import a_response_with_status, a_response_with_status_and_header + +_STREAM_NAME = "virtual_bank_account" +_SITE = "test-site" +_SITE_API_KEY = "test-api-key" +_PRODUCT_CATALOG = "2.0" +_PRIMARY_KEY = "id" +_CURSOR_FIELD = "updated_at" +_NO_STATE = {} +_NOW = datetime.now(timezone.utc) + +def _a_request() -> ChargebeeRequestBuilder: + return ChargebeeRequestBuilder.virtual_bank_account_endpoint(_SITE, _SITE_API_KEY) + +def _config() -> ConfigBuilder: + return ConfigBuilder().with_site(_SITE).with_site_api_key(_SITE_API_KEY).with_product_catalog(_PRODUCT_CATALOG) + +def _catalog(sync_mode: SyncMode) -> ConfiguredAirbyteCatalog: + return CatalogBuilder().with_stream(_STREAM_NAME, sync_mode).build() + +def _source() -> SourceChargebee: + return SourceChargebee() + +def _a_record() -> RecordBuilder: + return create_record_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + record_id_path=NestedPath([_STREAM_NAME, _PRIMARY_KEY]), + record_cursor_path=NestedPath([_STREAM_NAME, _CURSOR_FIELD]) + ) + +def _a_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_STREAM_NAME, __file__), + FieldPath("list"), + pagination_strategy=ChargebeePaginationStrategy() + ) + +def _read( + config_builder: ConfigBuilder, + sync_mode: SyncMode, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False +) -> EntrypointOutput: + catalog = _catalog(sync_mode) + config = config_builder.build() + return read(_source(), config, catalog, state, expecting_exception) + +@freezegun.freeze_time(_NOW.isoformat()) +class FullRefreshTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=28) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.full_refresh, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_valid_response_records_are_extracted_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests simple read and record extraction + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record()).with_record(_a_record()).build() + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8))) + assert len(output.records) == 2 + + @HttpMocker() + def test_given_multiple_pages_of_records_read_and_returned(self, http_mocker: HttpMocker) -> None: + # Tests pagination + http_mocker.get( + 
_a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).build(), + _a_response().with_record(_a_record()).with_pagination().build() + ) + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([self._start_date_in_seconds, self._now_in_seconds]).with_offset("[1707076198000,57873868]").build(), + _a_response().with_record(_a_record()).build() + ) + + self._read(_config().with_start_date(self._start_date)) + # HTTPMocker ensures call are performed + + @HttpMocker() + def test_given_http_status_400_when_read_then_stream_is_ignored(self, http_mocker: HttpMocker) -> None: + # Tests 400 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(400) + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert len(output.get_stream_statuses(f"{_STREAM_NAME}s")) == 0 + + + @HttpMocker() + def test_given_http_status_401_when_the_stream_is_incomplete(self, http_mocker: HttpMocker) -> None: + # Test 401 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status(401), + ) + output = self._read(_config().with_start_date(self._start_date), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + + @HttpMocker() + def test_given_rate_limited_when_read_then_retry_and_return_records(self, http_mocker: HttpMocker) -> None: + # Tests backoff/retry with rate limiting + http_mocker.get( + _a_request().with_any_query_params().build(), + [ + a_response_with_status_and_header(429, {"Retry-After": "0.01"}), + _a_response().with_record(_a_record()).build(), + ], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_then_retry_returns_200_and_extracted_records(self, http_mocker: HttpMocker) -> None: + # Tests retry with 500 status + http_mocker.get( + _a_request().with_any_query_params().build(), + [a_response_with_status_and_header(500, {"Retry-After": "0.01"}), _a_response().with_record(_a_record()).build()], + ) + output = self._read(_config().with_start_date(self._start_date)) + assert len(output.records) == 1 + + @HttpMocker() + def test_given_http_status_500_after_max_retries_raises_config_error(self, http_mocker: HttpMocker) -> None: + # Tests 500 status error handling + http_mocker.get( + _a_request().with_any_query_params().build(), + a_response_with_status_and_header(500, {"Retry-After": "0.01"}), + ) + output = self._read(_config(), expecting_exception=True) + assert output.errors[-1].trace.error.failure_type == FailureType.config_error + +@freezegun.freeze_time(_NOW.isoformat()) +class IncrementalTest(TestCase): + + def setUp(self) -> None: + self._now = _NOW + self._now_in_seconds = int(self._now.timestamp()) + self._start_date = _NOW - timedelta(days=60) + self._start_date_in_seconds = int(self._start_date.timestamp()) + + @staticmethod + def _read(config: ConfigBuilder, state: Dict[str, Any], expecting_exception: bool = False) -> EntrypointOutput: + return _read(config, SyncMode.incremental, state, expecting_exception=expecting_exception) + + @HttpMocker() + def test_given_no_initial_state_when_read_then_return_state_based_on_most_recently_read_slice(self, http_mocker: HttpMocker) -> None: + # Tests setting state when no initial state is provided + 
cursor_value = self._start_date_in_seconds + 1 + http_mocker.get( + _a_request().with_any_query_params().build(), + _a_response().with_record(_a_record().with_cursor(cursor_value)).build() + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), _NO_STATE) + assert output.most_recent_state == { _STREAM_NAME: {_CURSOR_FIELD: str(self._now_in_seconds) }} + + @HttpMocker() + def test_given_initial_state_use_state_for_query_params(self, http_mocker: HttpMocker) -> None: + # Tests updating query param with state + state_cursor_value = int((self._now - timedelta(days=5)).timestamp()) + state = StateBuilder().with_stream_state(_STREAM_NAME, {_CURSOR_FIELD: state_cursor_value}).build() + http_mocker.get( + _a_request().with_sort_by_asc(_CURSOR_FIELD).with_include_deleted(True).with_updated_at_btw([state_cursor_value, self._now_in_seconds]).build(), + _a_response().with_record(_a_record().with_cursor(self._now_in_seconds - 1)).build(), + ) + output = self._read(_config().with_start_date(self._start_date - timedelta(hours=8)), state) + assert output.most_recent_state == { _STREAM_NAME: {_CURSOR_FIELD: str(self._now_in_seconds) }} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/400.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/400.json new file mode 100644 index 0000000000000..b28d57efa8f76 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/400.json @@ -0,0 +1,7 @@ +{ + "message": "Resource has expired", + "api_error_code": "resource_limit_exhausted", + "error_code": "resource_limit_exhausted", + "error_msg": "Resource has expired", + "http_status_code": 400 +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/401.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/401.json new file mode 100644 index 0000000000000..f491037b2b519 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/401.json @@ -0,0 +1,7 @@ +{ + "message": "Sorry,authentication failed.The basic authentication header has invalid format.", + "api_error_code": "api_authentication_failed", + "error_code": "api_authentication_wrong_format", + "error_msg": "Sorry,authentication failed.The basic authentication header has invalid format.", + "http_status_code": 401 +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/403.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/403.json new file mode 100644 index 0000000000000..3a2689b762fed --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/403.json @@ -0,0 +1,7 @@ +{ + "message": "Sorry, authorization failed. The key does not have the required permissions.", + "api_error_code": "api_authorization_failed", + "error_code": "api_authorization_failed", + "error_msg": "Sorry, authorization failed. 
The key does not have the required permissions.", + "http_status_code": 403 +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/429.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/429.json new file mode 100644 index 0000000000000..e962e573e293c --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/429.json @@ -0,0 +1,7 @@ +{ + "message": "Sorry, access has been blocked temporarily due to request count exceeding acceptable limits. Please try after some time.", + "api_error_code": "api_request_limit_exceeded", + "error_code": "api_request_limit_exceeded", + "error_msg": "Sorry, access has been blocked temporarily due to request count exceeding acceptable limits. Please try after some time.", + "http_status_code": 429 +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/500.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/500.json new file mode 100644 index 0000000000000..ad57f127be9e2 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/500.json @@ -0,0 +1,7 @@ +{ + "message": "Sorry,Something went wrong when trying to process the request.", + "api_error_code": "internal_error", + "error_code": "internal_error", + "error_msg": "Sorry,Something went wrong when trying to process the request.", + "http_status_code": 500 +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/addon.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/addon.json new file mode 100644 index 0000000000000..90e6f366d97f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/addon.json @@ -0,0 +1,33 @@ +{ + "list": [ + { + "addon": { + "charge_type": "recurring", + "currency_code": "USD", + "enabled_in_portal": true, + "id": "tiered_addon", + "is_shippable": false, + "name": "Tiered Addon", + "object": "addon", + "period": 1, + "period_unit": "month", + "pricing_model": "tiered", + "resource_version": 1517505776000, + "show_description_in_invoices": false, + "show_description_in_quotes": false, + "status": "active", + "taxable": true, + "tiers": [ + { + "ending_unit": 10, + "object": "tier", + "price": 100, + "starting_unit": 1 + } + ], + "type": "tiered", + "updated_at": 1517505776 + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/coupon.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/coupon.json new file mode 100644 index 0000000000000..2ed681b35de3c --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/coupon.json @@ -0,0 +1,27 @@ +{ + "list": [ + { + "coupon": { + "apply_discount_on": "not_applicable", + "apply_on": "each_specified_item", + "created_at": 1517495314, + "discount_percentage": 10, + "discount_type": "percentage", + "duration_type": "forever", + "id": "summer_offer", + "item_constraints": [ + { + "constraint": "all", + "item_type": "plan" + } + ], + "name": "Summer Offer", + "object": "coupon", + "redemptions": 0, + "resource_version": 1517495314967, + "status": "active", + "updated_at": 1517495314 + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/customer.json 
b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/customer.json new file mode 100644 index 0000000000000..23465a7b7d51f --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/customer.json @@ -0,0 +1,28 @@ +{ + "list": [ + { + "customer": { + "allow_direct_debit": false, + "auto_collection": "on", + "card_status": "no_card", + "created_at": 1517505747, + "deleted": false, + "email": "john@test.com", + "excess_payments": 0, + "first_name": "John", + "id": "__test__KyVnHhSBWlC1T2cj", + "last_name": "Doe", + "net_term_days": 0, + "object": "customer", + "pii_cleared": "active", + "preferred_currency_code": "USD", + "promotional_credits": 0, + "refundable_credits": 0, + "resource_version": 1517505747000, + "taxability": "taxable", + "unbilled_charges": 0, + "updated_at": 1517505747 + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/event.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/event.json new file mode 100644 index 0000000000000..58587184027c6 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/event.json @@ -0,0 +1,204 @@ +{ + "list": [ + { + "event": { + "id": "ev_16BPgETyVrQbiGhA", + "occurred_at": 1706822167, + "source": "admin_console", + "user": "sarah@sarah.com", + "object": "event", + "api_version": "v2", + "content": { + "subscription": { + "id": "16BPgETyVrQVHGh1", + "billing_period": 1, + "billing_period_unit": "month", + "customer_id": "sarah", + "status": "active", + "current_term_start": 1702578600, + "current_term_end": 1705256999, + "next_billing_at": 1705257000, + "created_at": 1702645601, + "started_at": 1702578600, + "activated_at": 1702578600, + "created_from_ip": "10.0.0.1", + "updated_at": 1702645601, + "has_scheduled_changes": false, + "channel": "web", + "resource_version": 1702645601793, + "deleted": false, + "object": "subscription", + "currency_code": "INR", + "subscription_items": [ + { + "item_price_id": "cross-train-advanced-INR-1_MONTH", + "item_type": "plan", + "quantity": 1, + "quantity_in_decimal": "1.0000", + "unit_price": 11667, + "unit_price_in_decimal": "116.66667", + "amount": 11667, + "amount_in_decimal": "116.66667", + "free_quantity": 0, + "free_quantity_in_decimal": "0.0000", + "object": "subscription_item" + } + ], + "due_invoices_count": 0, + "mrr": 0, + "has_scheduled_advance_invoices": false, + "override_relationship": false, + "create_pending_invoices": false, + "auto_close_invoices": true, + "business_entity_id": "16CQtCTrgrYwi9n2E" + }, + "customer": { + "id": "sarah", + "auto_collection": "on", + "net_term_days": 0, + "allow_direct_debit": false, + "created_at": 1700038561, + "created_from_ip": "10.0.0.2", + "taxability": "taxable", + "updated_at": 1702645580, + "pii_cleared": "active", + "channel": "web", + "resource_version": 1702645580741, + "deleted": false, + "object": "customer", + "card_status": "valid", + "promotional_credits": 0, + "refundable_credits": 0, + "excess_payments": 0, + "unbilled_charges": 0, + "preferred_currency_code": "INR", + "mrr": 0, + "primary_payment_source_id": "pm_169vujTyVrL5fFDl", + "payment_method": { + "object": "payment_method", + "type": "card", + "reference_id": "tok_169vujTyVrL5LFDk", + "gateway": "chargebee", + "gateway_account_id": "gw_1mk51R4QrLmQtYMht", + "status": "valid" + }, + "business_entity_id": "16CQtCTrgrYwi9n2E", + "tax_providers_fields": [], + 
"auto_close_invoices": true + }, + "card": { + "status": "valid", + "gateway": "chargebee", + "gateway_account_id": "gw_1mk51R4QrLmQtYMht", + "iin": "411111", + "last4": "1111", + "card_type": "visa", + "funding_type": "credit", + "expiry_month": 12, + "expiry_year": 2024, + "created_at": 1702645580, + "updated_at": 1702645580, + "ip_address": "10.0.0.1", + "resource_version": 1702645580740, + "object": "card", + "masked_number": "************1111", + "customer_id": "boom", + "payment_source_id": "pm_169vujTyVrL5fFDl" + }, + "invoice": { + "id": "203", + "customer_id": "boom", + "subscription_id": "16BPgETyVrQVHGh1", + "recurring": true, + "status": "paid", + "price_type": "tax_exclusive", + "date": 1702578600, + "due_date": 1702578600, + "net_term_days": 0, + "exchange_rate": 83.283543, + "total": 11667, + "amount_paid": 11667, + "amount_adjusted": 0, + "write_off_amount": 0, + "credits_applied": 0, + "amount_due": 0, + "paid_at": 1702645601, + "updated_at": 1702645601, + "resource_version": 1702645601783, + "deleted": false, + "object": "invoice", + "first_invoice": true, + "amount_to_collect": 0, + "round_off_amount": 0, + "new_sales_amount": 11667, + "has_advance_charges": false, + "currency_code": "INR", + "base_currency_code": "USD", + "generated_at": 1702578600, + "is_gifted": false, + "term_finalized": true, + "channel": "web", + "tax": 0, + "line_items": [ + { + "id": "li_16BPgETyVrQWBGh3", + "date_from": 1702578600, + "date_to": 1705256999, + "unit_amount": 11667, + "quantity": 1, + "amount": 11667, + "pricing_model": "per_unit", + "is_taxed": false, + "tax_amount": 0, + "unit_amount_in_decimal": "116.66667", + "quantity_in_decimal": "1.0000", + "amount_in_decimal": "116.66667", + "object": "line_item", + "subscription_id": "16BPgETyVrQVHGh1", + "customer_id": "boom", + "description": "cross-train-advanced-INR-1_MONTH", + "entity_type": "plan_item_price", + "entity_id": "cross-train-advanced-INR-1_MONTH", + "metered": false, + "tax_exempt_reason": "export", + "discount_amount": 0, + "item_level_discount_amount": 0 + } + ], + "sub_total": 11667, + "linked_payments": [ + { + "txn_id": "txn_16BPgETyVrQXVGh4", + "applied_amount": 11667, + "applied_at": 1702645601, + "txn_status": "success", + "txn_date": 1702645601, + "txn_amount": 11667 + } + ], + "applied_credits": [], + "adjustment_credit_notes": [], + "issued_credit_notes": [], + "linked_orders": [], + "dunning_attempts": [], + "notes": [ + { + "note": "You can pay card." 
+ } + ], + "business_entity_id": "16CQtCTrgrYwi9n2E" + } + }, + "event_type": "subscription_created", + "webhook_status": "not_configured", + "webhooks": [ + { + "id": "whv2_Azz5aITsMVdKtVWV", + "webhook_status": "not_applicable", + "object": "webhook" + } + ] + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/hosted_page.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/hosted_page.json new file mode 100644 index 0000000000000..7846ef2df8d5e --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/hosted_page.json @@ -0,0 +1,18 @@ +{ + "list": [ + { + "hosted_page": { + "created_at": 1517678804, + "embed": false, + "expires_at": 1517682404, + "id": "__test__yRVH4Pr8siRXJEPsjeJXlcd8Aq1fDqVzd", + "object": "hosted_page", + "resource_version": 1517678804000, + "state": "created", + "type": "claim_gift", + "updated_at": 1517678804, + "url": "https://yourapp.chargebee.com/pages/v3/__test__yRVH4Pr8siRXJEPsjeJXlcd8Aq1fDqVzd/claim_gift" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/plan.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/plan.json new file mode 100644 index 0000000000000..9a3e8adcf359a --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/plan.json @@ -0,0 +1,37 @@ +{ + "list": [ + { + "plan": { + "addon_applicability": "all", + "charge_model": "tiered", + "currency_code": "USD", + "enabled_in_hosted_pages": true, + "enabled_in_portal": true, + "free_quantity": 0, + "giftable": false, + "id": "tiered_plan", + "invoice_name": "sample Tiered Plan", + "is_shippable": false, + "name": "Tiered Plan", + "object": "plan", + "period": 1, + "period_unit": "month", + "pricing_model": "tiered", + "resource_version": 1517505798000, + "show_description_in_invoices": false, + "show_description_in_quotes": false, + "status": "active", + "taxable": true, + "tiers": [ + { + "ending_unit": 10, + "object": "tier", + "price": 100, + "starting_unit": 1 + } + ], + "updated_at": 1517505798 + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/site_migration_detail.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/site_migration_detail.json new file mode 100644 index 0000000000000..1a0a3effd3fb1 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/site_migration_detail.json @@ -0,0 +1,15 @@ +{ + "list": [ + { + "site_migration_detail": { + "entity_id": "__test__KyVnHhSBWT9AW8j", + "entity_id_at_other_site": "__test__KyVnHhSBWT9AW8j", + "entity_type": "customer", + "migrated_at": 1600704658, + "object": "site_migration_detail", + "other_site_name": "mannar", + "status": "moved_in" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/subscription.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/subscription.json new file mode 100644 index 0000000000000..064a2121250d6 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/subscription.json @@ -0,0 +1,63 @@ +{ + "list": [ + { + "customer": { + "allow_direct_debit": false, + "auto_collection": "off", + "card_status": "no_card", + "created_at": 1612890919, + "deleted": false, + 
"excess_payments": 0, + "first_name": "John", + "id": "__test__8asukSOXdvg4PD", + "last_name": "Doe", + "net_term_days": 0, + "object": "customer", + "pii_cleared": "active", + "preferred_currency_code": "USD", + "promotional_credits": 0, + "refundable_credits": 0, + "resource_version": 1612890919000, + "taxability": "taxable", + "unbilled_charges": 0, + "updated_at": 1612890919 + }, + "subscription": { + "activated_at": 1612890920, + "billing_period": 1, + "billing_period_unit": "month", + "created_at": 1612890920, + "currency_code": "USD", + "current_term_end": 1615310120, + "current_term_start": 1612890920, + "customer_id": "__test__8asukSOXdvg4PD", + "deleted": false, + "due_invoices_count": 1, + "due_since": 1612890920, + "has_scheduled_changes": false, + "id": "__test__8asukSOXdvliPG", + "mrr": 0, + "next_billing_at": 1615310120, + "object": "subscription", + "remaining_billing_cycles": 1, + "resource_version": 1612890920000, + "started_at": 1612890920, + "status": "active", + "subscription_items": [ + { + "amount": 1000, + "billing_cycles": 1, + "free_quantity": 0, + "item_price_id": "basic-USD", + "item_type": "plan", + "object": "subscription_item", + "quantity": 1, + "unit_price": 1000 + } + ], + "total_dues": 1100, + "updated_at": 1612890920 + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/virtual_bank_account.json b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/virtual_bank_account.json new file mode 100644 index 0000000000000..ac8667ffee032 --- /dev/null +++ b/airbyte-integrations/connectors/source-chargebee/unit_tests/resource/http/response/virtual_bank_account.json @@ -0,0 +1,24 @@ +{ + "list": [ + { + "virtual_bank_account": { + "account_number": "test_5a576cb69dc2", + "bank_name": "TEST BANK", + "created_at": 1517501396, + "customer_id": "__test__KyVnHhSBWSvsr5M", + "deleted": false, + "email": "Duncan@ac.com", + "gateway": "stripe", + "gateway_account_id": "gw___test__KyVnGlSBWSv3GHt", + "id": "vba___test__KyVnHhSBWSw7J5O", + "object": "virtual_bank_account", + "reference_id": "cus_I57FLcFhampr4H/src_1HUx16Jv9j0DyntJh6X59egJ", + "resource_version": 1517501396000, + "routing_number": "110000000", + "scheme": "ach_credit", + "swift_code": "TSTEZ122", + "updated_at": 1705697624 + } + } + ] +} diff --git a/docs/integrations/sources/chargebee.md b/docs/integrations/sources/chargebee.md index 27b6568ef8505..ab3d8c2c06193 100644 --- a/docs/integrations/sources/chargebee.md +++ b/docs/integrations/sources/chargebee.md @@ -99,6 +99,7 @@ The Chargebee connector should not run into [Chargebee API](https://apidocs.char | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :-------------------------------------------------------------------------------------------------- | +| 0.4.1 | 2024-03-03 | [35509](https://github.com/airbytehq/airbyte/pull/35509) | Updates CDK version to latest (0.67.1), updates `site_migration_detail` record filtering | | 0.4.0 | 2024-02-12 | [34053](https://github.com/airbytehq/airbyte/pull/34053) | Add missing fields to and cleans up schemas, adds incremental support for `gift`, `site_migration_detail`, and `unbilled_charge` streams.` | | 0.3.1 | 2024-02-12 | [35169](https://github.com/airbytehq/airbyte/pull/35169) | Manage dependencies with Poetry. | | 0.3.0 | 2023-12-26 | [33696](https://github.com/airbytehq/airbyte/pull/33696) | Add new stream, add fields to existing streams |