From d233eb110f75d074d8240006f0582b35e151d2a4 Mon Sep 17 00:00:00 2001
From: fern-api <115122769+fern-api[bot]@users.noreply.github.com>
Date: Wed, 24 Jul 2024 11:46:04 +0000
Subject: [PATCH 1/2] SDK regeneration

---
 poetry.lock | 203 +-
 pyproject.toml | 3 +-
 reference.md | 43086 ++++
 src/merge/client.py | 8 +-
 src/merge/core/__init__.py | 19 +-
 src/merge/core/client_wrapper.py | 2 +-
 src/merge/core/http_client.py | 56 +-
 src/merge/core/jsonable_encoder.py | 58 +-
 src/merge/core/pydantic_utilities.py | 157 +-
 src/merge/core/query_encoder.py | 6 +-
 src/merge/core/request_options.py | 4 +-
 .../resources/account_details/client.py | 20 +-
 .../resources/account_token/client.py | 24 +-
 .../resources/accounting_periods/client.py | 42 +-
 .../accounting/resources/accounts/client.py | 82 +-
 .../accounting/resources/addresses/client.py | 24 +-
 .../resources/async_passthrough/client.py | 52 +-
 .../resources/attachments/client.py | 82 +-
 .../resources/audit_trail/client.py | 20 +-
 .../resources/available_actions/client.py | 20 +-
 .../resources/balance_sheets/client.py | 42 +-
 .../resources/cash_flow_statements/client.py | 42 +-
 .../resources/company_info/client.py | 42 +-
 .../accounting/resources/contacts/client.py | 82 +-
 .../resources/credit_notes/client.py | 42 +-
 .../resources/delete_account/client.py | 18 +-
 .../accounting/resources/expenses/client.py | 82 +-
 .../resources/field_mapping/client.py | 132 +-
 .../resources/force_resync/client.py | 20 +-
 .../resources/generate_key/client.py | 24 +-
 .../resources/income_statements/client.py | 42 +-
 .../accounting/resources/invoices/client.py | 128 +-
 .../accounting/resources/issues/client.py | 42 +-
 .../accounting/resources/items/client.py | 42 +-
 .../resources/journal_entries/client.py | 82 +-
 .../accounting/resources/link_token/client.py | 30 +-
 .../resources/linked_accounts/client.py | 20 +-
 .../resources/passthrough/client.py | 30 +-
 .../accounting/resources/payments/client.py | 128 +-
 .../resources/phone_numbers/client.py | 24 +-
 .../resources/purchase_orders/client.py | 82 +-
 .../resources/regenerate_key/client.py | 24 +-
 .../accounting/resources/scopes/client.py | 100 +-
 .../resources/sync_status/client.py | 20 +-
 .../accounting/resources/tax_rates/client.py | 42 +-
 .../resources/tracking_categories/client.py | 42 +-
 .../resources/transactions/client.py | 42 +-
 .../resources/vendor_credits/client.py | 42 +-
 .../resources/webhook_receivers/client.py | 44 +-
 .../resources/accounting/types/account.py | 58 +-
 .../accounting/types/account_details.py | 33 +-
 .../types/account_details_and_actions.py | 35 +-
 ...account_details_and_actions_integration.py | 31 +-
 .../accounting/types/account_integration.py | 47 +-
 .../accounting/types/account_request.py | 51 +-
 .../accounting/types/account_response.py | 31 +-
 .../accounting/types/account_token.py | 31 +-
 .../accounting/types/accounting_attachment.py | 44 +-
 .../types/accounting_attachment_request.py | 37 +-
 .../types/accounting_attachment_response.py | 31 +-
 .../accounting/types/accounting_period.py | 40 +-
 .../types/accounting_phone_number.py | 38 +-
 .../types/accounting_phone_number_request.py | 35 +-
 .../resources/accounting/types/address.py | 48 +-
 .../accounting/types/address_request.py | 45 +-
 .../accounting/types/advanced_metadata.py | 31 +-
 .../types/async_passthrough_reciept.py | 31 +-
 .../accounting/types/audit_log_event.py | 38 +-
 .../accounting/types/available_actions.py | 31 +-
 .../accounting/types/balance_sheet.py | 50 +-
 .../accounting/types/cash_flow_statement.py | 54 +-
.../types/common_model_scope_api.py | 33 +- .../types/common_model_scopes_body_request.py | 31 +- .../accounting/types/company_info.py | 54 +- .../resources/accounting/types/contact.py | 60 +- .../accounting/types/contact_request.py | 51 +- .../accounting/types/contact_response.py | 31 +- .../resources/accounting/types/credit_note.py | 66 +- .../accounting/types/credit_note_line_item.py | 58 +- .../types/data_passthrough_request.py | 43 +- .../accounting/types/debug_mode_log.py | 31 +- .../types/debug_model_log_summary.py | 31 +- .../types/error_validation_problem.py | 31 +- .../resources/accounting/types/expense.py | 62 +- .../accounting/types/expense_line.py | 54 +- .../accounting/types/expense_line_request.py | 49 +- .../accounting/types/expense_request.py | 52 +- .../accounting/types/expense_response.py | 31 +- .../types/external_target_field_api.py | 31 +- .../external_target_field_api_response.py | 73 +- .../types/field_mapping_api_instance.py | 31 +- ...field_mapping_api_instance_remote_field.py | 35 +- ...tance_remote_field_remote_endpoint_info.py | 31 +- .../field_mapping_api_instance_response.py | 73 +- ...field_mapping_api_instance_target_field.py | 31 +- .../types/field_mapping_instance_response.py | 31 +- .../types/field_permission_deserializer.py | 31 +- .../field_permission_deserializer_request.py | 31 +- .../accounting/types/income_statement.py | 54 +- ...ividual_common_model_scope_deserializer.py | 31 +- ...common_model_scope_deserializer_request.py | 31 +- .../resources/accounting/types/invoice.py | 76 +- .../accounting/types/invoice_line_item.py | 52 +- .../types/invoice_line_item_request.py | 47 +- .../accounting/types/invoice_request.py | 64 +- .../accounting/types/invoice_response.py | 31 +- src/merge/resources/accounting/types/issue.py | 32 +- src/merge/resources/accounting/types/item.py | 54 +- .../accounting/types/journal_entry.py | 62 +- .../accounting/types/journal_entry_request.py | 46 +- .../types/journal_entry_response.py | 31 +- .../accounting/types/journal_line.py | 48 +- .../accounting/types/journal_line_request.py | 43 +- .../resources/accounting/types/link_token.py | 31 +- .../accounting/types/linked_account_status.py | 31 +- .../accounting/types/meta_response.py | 31 +- .../accounting/types/model_operation.py | 31 +- .../types/model_permission_deserializer.py | 31 +- .../model_permission_deserializer_request.py | 31 +- .../types/multipart_form_field_request.py | 41 +- ...inated_account_details_and_actions_list.py | 31 +- .../types/paginated_account_list.py | 31 +- .../paginated_accounting_attachment_list.py | 31 +- .../types/paginated_accounting_period_list.py | 31 +- .../types/paginated_audit_log_event_list.py | 31 +- .../types/paginated_balance_sheet_list.py | 31 +- .../paginated_cash_flow_statement_list.py | 31 +- .../types/paginated_company_info_list.py | 31 +- .../types/paginated_contact_list.py | 31 +- .../types/paginated_credit_note_list.py | 31 +- .../types/paginated_expense_list.py | 31 +- .../types/paginated_income_statement_list.py | 31 +- .../types/paginated_invoice_list.py | 31 +- .../accounting/types/paginated_issue_list.py | 31 +- .../accounting/types/paginated_item_list.py | 31 +- .../types/paginated_journal_entry_list.py | 31 +- .../types/paginated_payment_list.py | 31 +- .../types/paginated_purchase_order_list.py | 31 +- .../types/paginated_sync_status_list.py | 31 +- .../types/paginated_tax_rate_list.py | 31 +- .../types/paginated_tracking_category_list.py | 31 +- .../types/paginated_transaction_list.py | 31 +- 
.../types/paginated_vendor_credit_list.py | 31 +- .../types/patched_payment_request.py | 50 +- .../resources/accounting/types/payment.py | 60 +- .../accounting/types/payment_line_item.py | 44 +- .../types/payment_line_item_request.py | 40 +- .../accounting/types/payment_request.py | 50 +- .../accounting/types/payment_response.py | 31 +- .../accounting/types/purchase_order.py | 68 +- .../types/purchase_order_line_item.py | 60 +- .../types/purchase_order_line_item_request.py | 55 +- .../types/purchase_order_request.py | 52 +- .../types/purchase_order_response.py | 31 +- .../resources/accounting/types/remote_data.py | 31 +- .../accounting/types/remote_endpoint_info.py | 31 +- .../accounting/types/remote_field_api.py | 35 +- .../types/remote_field_api_response.py | 73 +- .../resources/accounting/types/remote_key.py | 31 +- .../accounting/types/remote_response.py | 31 +- .../resources/accounting/types/report_item.py | 42 +- .../resources/accounting/types/sync_status.py | 30 +- .../resources/accounting/types/tax_rate.py | 46 +- .../accounting/types/tracking_category.py | 48 +- .../resources/accounting/types/transaction.py | 58 +- .../accounting/types/transaction_line_item.py | 58 +- .../types/validation_problem_source.py | 31 +- .../accounting/types/vendor_credit.py | 54 +- .../accounting/types/vendor_credit_line.py | 52 +- .../types/warning_validation_problem.py | 31 +- .../accounting/types/webhook_receiver.py | 31 +- .../ats/resources/account_details/client.py | 20 +- .../ats/resources/account_token/client.py | 24 +- .../ats/resources/activities/client.py | 84 +- .../ats/resources/applications/client.py | 106 +- .../ats/resources/async_passthrough/client.py | 52 +- .../ats/resources/attachments/client.py | 84 +- .../ats/resources/audit_trail/client.py | 20 +- .../ats/resources/available_actions/client.py | 20 +- .../ats/resources/candidates/client.py | 156 +- .../ats/resources/delete_account/client.py | 18 +- .../ats/resources/departments/client.py | 42 +- .../resources/ats/resources/eeocs/client.py | 42 +- .../ats/resources/field_mapping/client.py | 132 +- .../ats/resources/force_resync/client.py | 20 +- .../ats/resources/generate_key/client.py | 24 +- .../ats/resources/interviews/client.py | 84 +- .../resources/ats/resources/issues/client.py | 42 +- .../resources/job_interview_stages/client.py | 42 +- .../ats/resources/job_postings/client.py | 42 +- .../resources/ats/resources/jobs/client.py | 64 +- .../ats/resources/link_token/client.py | 30 +- .../ats/resources/linked_accounts/client.py | 20 +- .../resources/ats/resources/offers/client.py | 42 +- .../resources/ats/resources/offices/client.py | 42 +- .../ats/resources/passthrough/client.py | 30 +- .../ats/resources/regenerate_key/client.py | 24 +- .../ats/resources/reject_reasons/client.py | 42 +- .../resources/ats/resources/scopes/client.py | 100 +- .../ats/resources/scorecards/client.py | 42 +- .../ats/resources/sync_status/client.py | 20 +- .../resources/ats/resources/tags/client.py | 20 +- .../resources/ats/resources/users/client.py | 42 +- .../ats/resources/webhook_receivers/client.py | 44 +- .../resources/ats/types/account_details.py | 33 +- .../ats/types/account_details_and_actions.py | 35 +- ...account_details_and_actions_integration.py | 31 +- .../ats/types/account_integration.py | 47 +- .../resources/ats/types/account_token.py | 31 +- src/merge/resources/ats/types/activity.py | 52 +- .../resources/ats/types/activity_request.py | 43 +- .../resources/ats/types/activity_response.py | 31 +- .../resources/ats/types/advanced_metadata.py 
| 31 +- src/merge/resources/ats/types/application.py | 54 +- .../ats/types/application_request.py | 46 +- .../ats/types/application_response.py | 31 +- .../ats/types/async_passthrough_reciept.py | 31 +- src/merge/resources/ats/types/attachment.py | 44 +- .../resources/ats/types/attachment_request.py | 39 +- .../ats/types/attachment_response.py | 31 +- .../resources/ats/types/audit_log_event.py | 38 +- .../resources/ats/types/available_actions.py | 31 +- src/merge/resources/ats/types/candidate.py | 64 +- .../resources/ats/types/candidate_request.py | 52 +- .../resources/ats/types/candidate_response.py | 31 +- .../ats/types/common_model_scope_api.py | 33 +- .../types/common_model_scopes_body_request.py | 31 +- .../ats/types/data_passthrough_request.py | 43 +- .../resources/ats/types/debug_mode_log.py | 31 +- .../ats/types/debug_model_log_summary.py | 31 +- src/merge/resources/ats/types/department.py | 40 +- src/merge/resources/ats/types/eeoc.py | 50 +- .../resources/ats/types/email_address.py | 38 +- .../ats/types/email_address_request.py | 35 +- .../ats/types/error_validation_problem.py | 31 +- .../ats/types/external_target_field_api.py | 31 +- .../external_target_field_api_response.py | 69 +- .../ats/types/field_mapping_api_instance.py | 31 +- ...field_mapping_api_instance_remote_field.py | 35 +- ...tance_remote_field_remote_endpoint_info.py | 31 +- .../field_mapping_api_instance_response.py | 69 +- ...field_mapping_api_instance_target_field.py | 31 +- .../types/field_mapping_instance_response.py | 31 +- .../types/field_permission_deserializer.py | 31 +- .../field_permission_deserializer_request.py | 31 +- ...ividual_common_model_scope_deserializer.py | 31 +- ...common_model_scope_deserializer_request.py | 31 +- src/merge/resources/ats/types/issue.py | 32 +- src/merge/resources/ats/types/job.py | 64 +- .../ats/types/job_interview_stage.py | 44 +- src/merge/resources/ats/types/job_posting.py | 54 +- src/merge/resources/ats/types/link_token.py | 31 +- .../ats/types/linked_account_status.py | 31 +- .../resources/ats/types/meta_response.py | 31 +- .../resources/ats/types/model_operation.py | 31 +- .../types/model_permission_deserializer.py | 31 +- .../model_permission_deserializer_request.py | 31 +- .../ats/types/multipart_form_field_request.py | 41 +- src/merge/resources/ats/types/offer.py | 54 +- src/merge/resources/ats/types/office.py | 42 +- ...inated_account_details_and_actions_list.py | 31 +- .../ats/types/paginated_activity_list.py | 31 +- .../ats/types/paginated_application_list.py | 31 +- .../ats/types/paginated_attachment_list.py | 31 +- .../types/paginated_audit_log_event_list.py | 31 +- .../ats/types/paginated_candidate_list.py | 31 +- .../ats/types/paginated_department_list.py | 31 +- .../ats/types/paginated_eeoc_list.py | 31 +- .../ats/types/paginated_issue_list.py | 31 +- .../paginated_job_interview_stage_list.py | 31 +- .../resources/ats/types/paginated_job_list.py | 31 +- .../ats/types/paginated_job_posting_list.py | 31 +- .../ats/types/paginated_offer_list.py | 31 +- .../ats/types/paginated_office_list.py | 31 +- .../ats/types/paginated_reject_reason_list.py | 31 +- .../ats/types/paginated_remote_user_list.py | 31 +- .../paginated_scheduled_interview_list.py | 31 +- .../ats/types/paginated_scorecard_list.py | 31 +- .../paginated_screening_question_list.py | 31 +- .../ats/types/paginated_sync_status_list.py | 31 +- .../resources/ats/types/paginated_tag_list.py | 31 +- .../ats/types/patched_candidate_request.py | 52 +- src/merge/resources/ats/types/phone_number.py | 38 +- 
.../ats/types/phone_number_request.py | 35 +- .../resources/ats/types/reject_reason.py | 40 +- src/merge/resources/ats/types/remote_data.py | 31 +- .../ats/types/remote_endpoint_info.py | 31 +- .../resources/ats/types/remote_field_api.py | 35 +- .../ats/types/remote_field_api_response.py | 69 +- src/merge/resources/ats/types/remote_key.py | 31 +- .../resources/ats/types/remote_response.py | 31 +- src/merge/resources/ats/types/remote_user.py | 50 +- .../ats/types/scheduled_interview.py | 60 +- .../ats/types/scheduled_interview_request.py | 46 +- .../ats/types/scheduled_interview_response.py | 31 +- src/merge/resources/ats/types/scorecard.py | 50 +- .../resources/ats/types/screening_question.py | 46 +- .../ats/types/screening_question_option.py | 38 +- src/merge/resources/ats/types/sync_status.py | 30 +- src/merge/resources/ats/types/tag.py | 40 +- src/merge/resources/ats/types/url.py | 38 +- src/merge/resources/ats/types/url_request.py | 35 +- .../ats/types/validation_problem_source.py | 31 +- .../ats/types/warning_validation_problem.py | 31 +- .../resources/ats/types/webhook_receiver.py | 31 +- .../crm/resources/account_details/client.py | 20 +- .../crm/resources/account_token/client.py | 24 +- .../crm/resources/accounts/client.py | 146 +- .../crm/resources/association_types/client.py | 114 +- .../crm/resources/associations/client.py | 56 +- .../crm/resources/async_passthrough/client.py | 52 +- .../crm/resources/audit_trail/client.py | 20 +- .../crm/resources/available_actions/client.py | 20 +- .../crm/resources/contacts/client.py | 174 +- .../resources/custom_object_classes/client.py | 42 +- .../crm/resources/custom_objects/client.py | 98 +- .../crm/resources/delete_account/client.py | 18 +- .../crm/resources/engagement_types/client.py | 60 +- .../crm/resources/engagements/client.py | 146 +- .../crm/resources/field_mapping/client.py | 132 +- .../crm/resources/force_resync/client.py | 20 +- .../crm/resources/generate_key/client.py | 24 +- .../resources/crm/resources/issues/client.py | 42 +- .../resources/crm/resources/leads/client.py | 100 +- .../crm/resources/link_token/client.py | 30 +- .../crm/resources/linked_accounts/client.py | 20 +- .../resources/crm/resources/notes/client.py | 100 +- .../crm/resources/opportunities/client.py | 146 +- .../crm/resources/passthrough/client.py | 30 +- .../crm/resources/regenerate_key/client.py | 24 +- .../resources/crm/resources/scopes/client.py | 100 +- .../resources/crm/resources/stages/client.py | 60 +- .../crm/resources/sync_status/client.py | 20 +- .../resources/crm/resources/tasks/client.py | 146 +- .../resources/crm/resources/users/client.py | 88 +- .../crm/resources/webhook_receivers/client.py | 44 +- src/merge/resources/crm/types/account.py | 54 +- .../resources/crm/types/account_details.py | 33 +- .../crm/types/account_details_and_actions.py | 35 +- ...account_details_and_actions_integration.py | 31 +- .../crm/types/account_integration.py | 47 +- .../resources/crm/types/account_request.py | 44 +- .../resources/crm/types/account_token.py | 31 +- src/merge/resources/crm/types/address.py | 48 +- .../resources/crm/types/address_request.py | 45 +- .../resources/crm/types/advanced_metadata.py | 31 +- src/merge/resources/crm/types/association.py | 36 +- .../crm/types/association_sub_type.py | 34 +- .../resources/crm/types/association_type.py | 38 +- .../types/association_type_request_request.py | 31 +- .../crm/types/async_passthrough_reciept.py | 31 +- .../resources/crm/types/audit_log_event.py | 38 +- .../resources/crm/types/available_actions.py | 
31 +- .../crm/types/common_model_scope_api.py | 33 +- .../types/common_model_scopes_body_request.py | 31 +- src/merge/resources/crm/types/contact.py | 48 +- .../resources/crm/types/contact_request.py | 40 +- .../crm/types/crm_account_response.py | 31 +- .../types/crm_association_type_response.py | 31 +- .../crm/types/crm_contact_response.py | 31 +- .../crm/types/crm_custom_object_response.py | 31 +- .../resources/crm/types/custom_object.py | 40 +- .../crm/types/custom_object_class.py | 40 +- .../crm/types/custom_object_request.py | 31 +- .../crm/types/data_passthrough_request.py | 43 +- .../resources/crm/types/debug_mode_log.py | 31 +- .../crm/types/debug_model_log_summary.py | 31 +- .../resources/crm/types/email_address.py | 38 +- .../crm/types/email_address_request.py | 35 +- src/merge/resources/crm/types/engagement.py | 54 +- .../resources/crm/types/engagement_request.py | 46 +- .../crm/types/engagement_response.py | 31 +- .../resources/crm/types/engagement_type.py | 40 +- .../crm/types/error_validation_problem.py | 31 +- .../crm/types/external_target_field_api.py | 31 +- .../external_target_field_api_response.py | 55 +- .../crm/types/field_mapping_api_instance.py | 31 +- ...field_mapping_api_instance_remote_field.py | 35 +- ...tance_remote_field_remote_endpoint_info.py | 31 +- .../field_mapping_api_instance_response.py | 55 +- ...field_mapping_api_instance_target_field.py | 31 +- .../types/field_mapping_instance_response.py | 31 +- .../types/field_permission_deserializer.py | 31 +- .../field_permission_deserializer_request.py | 31 +- .../crm/types/ignore_common_model_request.py | 31 +- ...ividual_common_model_scope_deserializer.py | 31 +- ...common_model_scope_deserializer_request.py | 31 +- src/merge/resources/crm/types/issue.py | 32 +- src/merge/resources/crm/types/item_schema.py | 31 +- src/merge/resources/crm/types/lead.py | 58 +- src/merge/resources/crm/types/lead_request.py | 48 +- .../resources/crm/types/lead_response.py | 31 +- src/merge/resources/crm/types/link_token.py | 31 +- .../crm/types/linked_account_status.py | 31 +- .../resources/crm/types/meta_response.py | 31 +- .../resources/crm/types/model_operation.py | 31 +- .../types/model_permission_deserializer.py | 31 +- .../model_permission_deserializer_request.py | 31 +- .../crm/types/multipart_form_field_request.py | 41 +- src/merge/resources/crm/types/note.py | 50 +- src/merge/resources/crm/types/note_request.py | 41 +- .../resources/crm/types/note_response.py | 31 +- .../types/object_class_description_request.py | 31 +- src/merge/resources/crm/types/opportunity.py | 56 +- .../crm/types/opportunity_request.py | 48 +- .../crm/types/opportunity_response.py | 31 +- ...inated_account_details_and_actions_list.py | 31 +- .../crm/types/paginated_account_list.py | 31 +- .../crm/types/paginated_association_list.py | 31 +- .../types/paginated_association_type_list.py | 31 +- .../types/paginated_audit_log_event_list.py | 31 +- .../crm/types/paginated_contact_list.py | 31 +- .../paginated_custom_object_class_list.py | 31 +- .../crm/types/paginated_custom_object_list.py | 31 +- .../crm/types/paginated_engagement_list.py | 31 +- .../types/paginated_engagement_type_list.py | 31 +- .../crm/types/paginated_issue_list.py | 31 +- .../crm/types/paginated_lead_list.py | 31 +- .../crm/types/paginated_note_list.py | 31 +- .../crm/types/paginated_opportunity_list.py | 31 +- .../paginated_remote_field_class_list.py | 31 +- .../crm/types/paginated_stage_list.py | 31 +- .../crm/types/paginated_sync_status_list.py | 31 +- 
.../crm/types/paginated_task_list.py | 31 +- .../crm/types/paginated_user_list.py | 31 +- .../crm/types/patched_account_request.py | 44 +- .../crm/types/patched_contact_request.py | 40 +- .../crm/types/patched_engagement_request.py | 46 +- .../crm/types/patched_opportunity_request.py | 48 +- .../crm/types/patched_task_request.py | 46 +- src/merge/resources/crm/types/phone_number.py | 38 +- .../crm/types/phone_number_request.py | 35 +- src/merge/resources/crm/types/remote_data.py | 31 +- .../crm/types/remote_endpoint_info.py | 31 +- src/merge/resources/crm/types/remote_field.py | 31 +- .../resources/crm/types/remote_field_api.py | 35 +- .../crm/types/remote_field_api_response.py | 55 +- .../resources/crm/types/remote_field_class.py | 31 +- .../remote_field_class_field_choices_item.py | 31 +- ...ote_field_class_for_custom_object_class.py | 34 +- ..._custom_object_class_field_choices_item.py | 31 +- ...ass_for_custom_object_class_item_schema.py | 31 +- .../crm/types/remote_field_request.py | 31 +- src/merge/resources/crm/types/remote_key.py | 31 +- .../resources/crm/types/remote_response.py | 31 +- src/merge/resources/crm/types/stage.py | 40 +- src/merge/resources/crm/types/sync_status.py | 30 +- src/merge/resources/crm/types/task.py | 54 +- src/merge/resources/crm/types/task_request.py | 46 +- .../resources/crm/types/task_response.py | 31 +- src/merge/resources/crm/types/user.py | 44 +- .../crm/types/validation_problem_source.py | 31 +- .../crm/types/warning_validation_problem.py | 31 +- .../resources/crm/types/webhook_receiver.py | 31 +- .../resources/account_details/client.py | 20 +- .../resources/account_token/client.py | 24 +- .../resources/async_passthrough/client.py | 52 +- .../resources/audit_trail/client.py | 20 +- .../resources/available_actions/client.py | 20 +- .../resources/delete_account/client.py | 18 +- .../filestorage/resources/drives/client.py | 42 +- .../resources/field_mapping/client.py | 132 +- .../filestorage/resources/files/client.py | 118 +- .../filestorage/resources/folders/client.py | 82 +- .../resources/force_resync/client.py | 20 +- .../resources/generate_key/client.py | 24 +- .../filestorage/resources/groups/client.py | 42 +- .../filestorage/resources/issues/client.py | 42 +- .../resources/link_token/client.py | 30 +- .../resources/linked_accounts/client.py | 20 +- .../resources/passthrough/client.py | 30 +- .../resources/regenerate_key/client.py | 24 +- .../filestorage/resources/scopes/client.py | 100 +- .../resources/sync_status/client.py | 20 +- .../filestorage/resources/users/client.py | 42 +- .../resources/webhook_receivers/client.py | 44 +- .../filestorage/types/account_details.py | 33 +- .../types/account_details_and_actions.py | 35 +- ...account_details_and_actions_integration.py | 31 +- .../filestorage/types/account_integration.py | 47 +- .../filestorage/types/account_token.py | 31 +- .../filestorage/types/advanced_metadata.py | 31 +- .../types/async_passthrough_reciept.py | 31 +- .../filestorage/types/audit_log_event.py | 38 +- .../filestorage/types/available_actions.py | 31 +- .../types/common_model_scope_api.py | 33 +- .../types/common_model_scopes_body_request.py | 31 +- .../types/data_passthrough_request.py | 43 +- .../filestorage/types/debug_mode_log.py | 31 +- .../types/debug_model_log_summary.py | 31 +- .../resources/filestorage/types/drive.py | 44 +- .../types/error_validation_problem.py | 31 +- .../types/external_target_field_api.py | 31 +- .../external_target_field_api_response.py | 43 +- .../types/field_mapping_api_instance.py | 31 +- 
...field_mapping_api_instance_remote_field.py | 35 +- ...tance_remote_field_remote_endpoint_info.py | 31 +- .../field_mapping_api_instance_response.py | 43 +- ...field_mapping_api_instance_target_field.py | 31 +- .../types/field_mapping_instance_response.py | 31 +- .../types/field_permission_deserializer.py | 31 +- .../field_permission_deserializer_request.py | 31 +- src/merge/resources/filestorage/types/file.py | 60 +- .../filestorage/types/file_request.py | 49 +- .../types/file_storage_file_response.py | 31 +- .../types/file_storage_folder_response.py | 31 +- .../resources/filestorage/types/folder.py | 58 +- .../filestorage/types/folder_request.py | 45 +- .../resources/filestorage/types/group.py | 42 +- ...ividual_common_model_scope_deserializer.py | 31 +- ...common_model_scope_deserializer_request.py | 31 +- .../resources/filestorage/types/issue.py | 32 +- .../resources/filestorage/types/link_token.py | 31 +- .../types/linked_account_status.py | 31 +- .../filestorage/types/meta_response.py | 31 +- .../filestorage/types/model_operation.py | 31 +- .../types/model_permission_deserializer.py | 31 +- .../model_permission_deserializer_request.py | 31 +- .../types/multipart_form_field_request.py | 41 +- ...inated_account_details_and_actions_list.py | 31 +- .../types/paginated_audit_log_event_list.py | 31 +- .../filestorage/types/paginated_drive_list.py | 31 +- .../filestorage/types/paginated_file_list.py | 31 +- .../types/paginated_folder_list.py | 31 +- .../filestorage/types/paginated_group_list.py | 31 +- .../filestorage/types/paginated_issue_list.py | 31 +- .../types/paginated_sync_status_list.py | 31 +- .../filestorage/types/paginated_user_list.py | 31 +- .../resources/filestorage/types/permission.py | 44 +- .../filestorage/types/permission_request.py | 41 +- .../filestorage/types/remote_endpoint_info.py | 31 +- .../filestorage/types/remote_field_api.py | 35 +- .../types/remote_field_api_response.py | 43 +- .../resources/filestorage/types/remote_key.py | 31 +- .../filestorage/types/remote_response.py | 31 +- .../filestorage/types/sync_status.py | 30 +- src/merge/resources/filestorage/types/user.py | 44 +- .../types/validation_problem_source.py | 31 +- .../types/warning_validation_problem.py | 31 +- .../filestorage/types/webhook_receiver.py | 31 +- .../hris/resources/account_details/client.py | 20 +- .../hris/resources/account_token/client.py | 24 +- .../resources/async_passthrough/client.py | 52 +- .../hris/resources/audit_trail/client.py | 20 +- .../resources/available_actions/client.py | 20 +- .../hris/resources/bank_info/client.py | 42 +- .../hris/resources/benefits/client.py | 42 +- .../hris/resources/companies/client.py | 42 +- .../hris/resources/delete_account/client.py | 18 +- .../hris/resources/dependents/client.py | 42 +- .../resources/employee_payroll_runs/client.py | 42 +- .../hris/resources/employees/client.py | 106 +- .../resources/employer_benefits/client.py | 42 +- .../hris/resources/employments/client.py | 42 +- .../hris/resources/field_mapping/client.py | 132 +- .../hris/resources/force_resync/client.py | 20 +- .../hris/resources/generate_key/client.py | 24 +- .../resources/hris/resources/groups/client.py | 42 +- .../resources/hris/resources/issues/client.py | 42 +- .../hris/resources/link_token/client.py | 30 +- .../hris/resources/linked_accounts/client.py | 20 +- .../hris/resources/locations/client.py | 42 +- .../hris/resources/passthrough/client.py | 30 +- .../hris/resources/pay_groups/client.py | 42 +- .../hris/resources/payroll_runs/client.py | 42 +- 
.../hris/resources/regenerate_key/client.py | 24 +- .../resources/hris/resources/scopes/client.py | 100 +- .../hris/resources/sync_status/client.py | 20 +- .../resources/hris/resources/teams/client.py | 42 +- .../hris/resources/time_off/client.py | 82 +- .../resources/time_off_balances/client.py | 42 +- .../resources/timesheet_entries/client.py | 82 +- .../resources/webhook_receivers/client.py | 44 +- .../resources/hris/types/account_details.py | 33 +- .../hris/types/account_details_and_actions.py | 35 +- ...account_details_and_actions_integration.py | 31 +- .../hris/types/account_integration.py | 47 +- .../resources/hris/types/account_token.py | 31 +- .../resources/hris/types/advanced_metadata.py | 31 +- .../hris/types/async_passthrough_reciept.py | 31 +- .../resources/hris/types/audit_log_event.py | 38 +- .../resources/hris/types/available_actions.py | 31 +- src/merge/resources/hris/types/bank_info.py | 50 +- src/merge/resources/hris/types/benefit.py | 54 +- .../hris/types/common_model_scope_api.py | 33 +- .../types/common_model_scopes_body_request.py | 31 +- src/merge/resources/hris/types/company.py | 44 +- .../hris/types/data_passthrough_request.py | 43 +- .../resources/hris/types/debug_mode_log.py | 31 +- .../hris/types/debug_model_log_summary.py | 31 +- src/merge/resources/hris/types/deduction.py | 44 +- src/merge/resources/hris/types/dependent.py | 60 +- src/merge/resources/hris/types/earning.py | 42 +- src/merge/resources/hris/types/employee.py | 94 +- .../hris/types/employee_payroll_run.py | 52 +- .../resources/hris/types/employee_request.py | 82 +- .../resources/hris/types/employee_response.py | 31 +- .../resources/hris/types/employer_benefit.py | 46 +- src/merge/resources/hris/types/employment.py | 60 +- .../hris/types/error_validation_problem.py | 31 +- .../hris/types/external_target_field_api.py | 31 +- .../external_target_field_api_response.py | 69 +- .../hris/types/field_mapping_api_instance.py | 31 +- ...field_mapping_api_instance_remote_field.py | 35 +- ...tance_remote_field_remote_endpoint_info.py | 31 +- .../field_mapping_api_instance_response.py | 69 +- ...field_mapping_api_instance_target_field.py | 31 +- .../types/field_mapping_instance_response.py | 31 +- .../types/field_permission_deserializer.py | 31 +- .../field_permission_deserializer_request.py | 31 +- src/merge/resources/hris/types/group.py | 46 +- ...ividual_common_model_scope_deserializer.py | 31 +- ...common_model_scope_deserializer_request.py | 31 +- src/merge/resources/hris/types/issue.py | 32 +- src/merge/resources/hris/types/link_token.py | 31 +- .../hris/types/linked_account_status.py | 31 +- src/merge/resources/hris/types/location.py | 56 +- .../resources/hris/types/meta_response.py | 31 +- .../resources/hris/types/model_operation.py | 31 +- .../types/model_permission_deserializer.py | 31 +- .../model_permission_deserializer_request.py | 31 +- .../types/multipart_form_field_request.py | 41 +- ...inated_account_details_and_actions_list.py | 31 +- .../types/paginated_audit_log_event_list.py | 31 +- .../hris/types/paginated_bank_info_list.py | 31 +- .../hris/types/paginated_benefit_list.py | 31 +- .../hris/types/paginated_company_list.py | 31 +- .../hris/types/paginated_dependent_list.py | 31 +- .../hris/types/paginated_employee_list.py | 31 +- .../paginated_employee_payroll_run_list.py | 31 +- .../types/paginated_employer_benefit_list.py | 31 +- .../hris/types/paginated_employment_list.py | 31 +- .../hris/types/paginated_group_list.py | 31 +- .../hris/types/paginated_issue_list.py | 31 +- 
.../hris/types/paginated_location_list.py | 31 +- .../hris/types/paginated_pay_group_list.py | 31 +- .../hris/types/paginated_payroll_run_list.py | 31 +- .../hris/types/paginated_sync_status_list.py | 31 +- .../hris/types/paginated_team_list.py | 31 +- .../types/paginated_time_off_balance_list.py | 31 +- .../hris/types/paginated_time_off_list.py | 31 +- .../types/paginated_timesheet_entry_list.py | 31 +- src/merge/resources/hris/types/pay_group.py | 40 +- src/merge/resources/hris/types/payroll_run.py | 48 +- src/merge/resources/hris/types/remote_data.py | 31 +- .../hris/types/remote_endpoint_info.py | 31 +- .../resources/hris/types/remote_field_api.py | 35 +- .../hris/types/remote_field_api_response.py | 69 +- src/merge/resources/hris/types/remote_key.py | 31 +- .../resources/hris/types/remote_response.py | 31 +- src/merge/resources/hris/types/sync_status.py | 30 +- src/merge/resources/hris/types/tax.py | 44 +- src/merge/resources/hris/types/team.py | 44 +- src/merge/resources/hris/types/time_off.py | 54 +- .../resources/hris/types/time_off_balance.py | 46 +- .../resources/hris/types/time_off_request.py | 48 +- .../resources/hris/types/time_off_response.py | 31 +- .../resources/hris/types/timesheet_entry.py | 46 +- .../hris/types/timesheet_entry_request.py | 38 +- .../hris/types/timesheet_entry_response.py | 31 +- .../hris/types/validation_problem_source.py | 31 +- .../hris/types/warning_validation_problem.py | 31 +- .../resources/hris/types/webhook_receiver.py | 31 +- .../resources/account_details/client.py | 20 +- .../resources/account_token/client.py | 24 +- .../ticketing/resources/accounts/client.py | 42 +- .../resources/async_passthrough/client.py | 52 +- .../ticketing/resources/attachments/client.py | 118 +- .../ticketing/resources/audit_trail/client.py | 20 +- .../resources/available_actions/client.py | 20 +- .../ticketing/resources/collections/client.py | 64 +- .../ticketing/resources/comments/client.py | 82 +- .../ticketing/resources/contacts/client.py | 82 +- .../resources/delete_account/client.py | 18 +- .../resources/field_mapping/client.py | 132 +- .../resources/force_resync/client.py | 20 +- .../resources/generate_key/client.py | 24 +- .../ticketing/resources/issues/client.py | 42 +- .../ticketing/resources/link_token/client.py | 30 +- .../resources/linked_accounts/client.py | 20 +- .../ticketing/resources/passthrough/client.py | 30 +- .../ticketing/resources/projects/client.py | 64 +- .../resources/regenerate_key/client.py | 24 +- .../ticketing/resources/roles/client.py | 42 +- .../ticketing/resources/scopes/client.py | 100 +- .../ticketing/resources/sync_status/client.py | 20 +- .../ticketing/resources/tags/client.py | 42 +- .../ticketing/resources/teams/client.py | 42 +- .../ticketing/resources/tickets/client.py | 168 +- .../ticketing/resources/users/client.py | 42 +- .../resources/webhook_receivers/client.py | 44 +- .../resources/ticketing/types/account.py | 42 +- .../ticketing/types/account_details.py | 33 +- .../types/account_details_and_actions.py | 35 +- ...account_details_and_actions_integration.py | 31 +- .../ticketing/types/account_integration.py | 47 +- .../ticketing/types/account_token.py | 31 +- .../ticketing/types/advanced_metadata.py | 31 +- .../types/async_passthrough_reciept.py | 31 +- .../resources/ticketing/types/attachment.py | 50 +- .../ticketing/types/attachment_request.py | 41 +- .../ticketing/types/audit_log_event.py | 38 +- .../ticketing/types/available_actions.py | 31 +- .../resources/ticketing/types/collection.py | 50 +- 
.../resources/ticketing/types/comment.py | 50 +- .../ticketing/types/comment_request.py | 43 +- .../ticketing/types/comment_response.py | 31 +- .../ticketing/types/common_model_scope_api.py | 33 +- .../types/common_model_scopes_body_request.py | 31 +- .../resources/ticketing/types/contact.py | 48 +- .../ticketing/types/contact_request.py | 41 +- .../types/data_passthrough_request.py | 43 +- .../ticketing/types/debug_mode_log.py | 31 +- .../types/debug_model_log_summary.py | 31 +- .../types/error_validation_problem.py | 31 +- .../types/external_target_field_api.py | 31 +- .../external_target_field_api_response.py | 59 +- .../types/field_mapping_api_instance.py | 31 +- ...field_mapping_api_instance_remote_field.py | 35 +- ...tance_remote_field_remote_endpoint_info.py | 31 +- .../field_mapping_api_instance_response.py | 59 +- ...field_mapping_api_instance_target_field.py | 31 +- .../types/field_mapping_instance_response.py | 31 +- .../types/field_permission_deserializer.py | 31 +- .../field_permission_deserializer_request.py | 31 +- ...ividual_common_model_scope_deserializer.py | 31 +- ...common_model_scope_deserializer_request.py | 31 +- src/merge/resources/ticketing/types/issue.py | 32 +- .../resources/ticketing/types/item_schema.py | 31 +- .../resources/ticketing/types/link_token.py | 31 +- .../ticketing/types/linked_account_status.py | 31 +- .../ticketing/types/meta_response.py | 31 +- .../ticketing/types/model_operation.py | 31 +- .../types/model_permission_deserializer.py | 31 +- .../model_permission_deserializer_request.py | 31 +- .../types/multipart_form_field_request.py | 41 +- ...inated_account_details_and_actions_list.py | 31 +- .../ticketing/types/paginated_account_list.py | 31 +- .../types/paginated_attachment_list.py | 31 +- .../types/paginated_audit_log_event_list.py | 31 +- .../types/paginated_collection_list.py | 31 +- .../ticketing/types/paginated_comment_list.py | 31 +- .../ticketing/types/paginated_contact_list.py | 31 +- .../ticketing/types/paginated_issue_list.py | 31 +- .../ticketing/types/paginated_project_list.py | 31 +- .../paginated_remote_field_class_list.py | 31 +- .../ticketing/types/paginated_role_list.py | 31 +- .../types/paginated_sync_status_list.py | 31 +- .../ticketing/types/paginated_tag_list.py | 31 +- .../ticketing/types/paginated_team_list.py | 31 +- .../ticketing/types/paginated_ticket_list.py | 31 +- .../ticketing/types/paginated_user_list.py | 31 +- .../ticketing/types/patched_ticket_request.py | 54 +- .../resources/ticketing/types/project.py | 42 +- .../resources/ticketing/types/remote_data.py | 31 +- .../ticketing/types/remote_endpoint_info.py | 31 +- .../resources/ticketing/types/remote_field.py | 31 +- .../ticketing/types/remote_field_api.py | 35 +- .../types/remote_field_api_response.py | 59 +- .../ticketing/types/remote_field_class.py | 31 +- .../remote_field_class_field_choices_item.py | 31 +- .../ticketing/types/remote_field_request.py | 31 +- .../resources/ticketing/types/remote_key.py | 31 +- .../ticketing/types/remote_response.py | 31 +- src/merge/resources/ticketing/types/role.py | 44 +- .../resources/ticketing/types/sync_status.py | 30 +- src/merge/resources/ticketing/types/tag.py | 40 +- src/merge/resources/ticketing/types/team.py | 42 +- src/merge/resources/ticketing/types/ticket.py | 66 +- .../ticketing/types/ticket_request.py | 54 +- .../ticketing/types/ticket_response.py | 31 +- .../types/ticketing_attachment_response.py | 31 +- .../types/ticketing_contact_response.py | 31 +- src/merge/resources/ticketing/types/user.py | 46 +- 
.../types/validation_problem_source.py | 31 +- .../types/warning_validation_problem.py | 31 +- .../ticketing/types/webhook_receiver.py | 31 +- tests/utils/test_http_client.py | 47 + tests/utils/test_query_encoding.py | 13 + 779 files changed, 58202 insertions(+), 16582 deletions(-) create mode 100644 reference.md create mode 100644 tests/utils/test_http_client.py create mode 100644 tests/utils/test_query_encoding.py diff --git a/poetry.lock b/poetry.lock index a2577970..464f6035 100644 --- a/poetry.lock +++ b/poetry.lock @@ -38,13 +38,13 @@ trio = ["trio (>=0.23)"] [[package]] name = "certifi" -version = "2024.6.2" +version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, - {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] @@ -60,13 +60,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -235,109 +235,122 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pydantic" -version = "2.7.4" +version = "2.8.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"}, - {file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"}, + {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, + {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.18.4" -typing-extensions = ">=4.6.1" +pydantic-core = "2.20.1" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] [package.extras] email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.18.4" +version = "2.20.1" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"}, - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"}, - {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"}, - {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"}, - {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"}, - {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"}, - {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"}, - {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"}, - {file = 
"pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"}, - {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"}, - {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"}, - {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"}, - {file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"}, - {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"}, - {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"}, - {file = 
"pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"}, - {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"}, - {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"}, - {file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"}, - {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"}, - {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"}, - {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"}, - {file = 
"pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"}, - {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, + {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, + {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, + {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, + {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, + {file = 
"pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, + {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, + {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, + {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, + {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, + {file = 
"pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, + {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, + {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, + {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, + {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, + {file = 
"pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, + {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, ] [package.dependencies] @@ -367,13 +380,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest-asyncio" -version = "0.23.7" +version = "0.23.8" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "pytest_asyncio-0.23.7-py3-none-any.whl", hash = "sha256:009b48127fbe44518a547bddd25611551b0e43ccdbf1e67d12479f569832c20b"}, - {file = "pytest_asyncio-0.23.7.tar.gz", hash = "sha256:5f5c72948f4c49e7db4f29f2521d4031f1c27f86e57b046126654083d4770268"}, + {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, + {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, ] [package.dependencies] @@ -455,4 +468,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "7fa2085bd251148908cf9a89f13b158fe85ccb037bb44614ae5f150ceecee53c" +content-hash = "547951903d3bbcefb041f1f3a6ef8d5414ee7c6d96c5131d6197bfc91bc6229a" diff --git a/pyproject.toml b/pyproject.toml index 1b7203e3..43dd38c6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "MergePythonClient" -version = 
"1.0.12" +version = "1.1.0rc0" description = "" readme = "README.md" authors = [] @@ -34,6 +34,7 @@ Repository = 'https://github.com/merge-api/merge-python-client' python = "^3.8" httpx = ">=0.21.2" pydantic = ">= 1.9.2" +pydantic-core = "^2.18.2" typing_extensions = ">= 4.0.0" [tool.poetry.dev-dependencies] diff --git a/reference.md b/reference.md new file mode 100644 index 00000000..a9958fd8 --- /dev/null +++ b/reference.md @@ -0,0 +1,43086 @@ +# Reference +## Filestorage AccountDetails +
client.filestorage.account_details.retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get details for a linked account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.account_details.retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Filestorage AccountToken +
client.filestorage.account_token.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns the account token for the end user with the provided public token. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.account_token.retrieve( + public_token="public_token", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**public_token:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Filestorage AsyncPassthrough +
client.filestorage.async_passthrough.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Asynchronously pull data from an endpoint not currently supported by Merge. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.filestorage import DataPassthroughRequest, MethodEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.async_passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request:** `DataPassthroughRequest` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.filestorage.async_passthrough.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Retrieves data from an earlier async-passthrough POST request. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.async_passthrough.retrieve( + async_passthrough_receipt_id="async_passthrough_receipt_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**async_passthrough_receipt_id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
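A typical flow chains the two async-passthrough endpoints: `create` enqueues the request and returns a receipt, and `retrieve` is called later with that receipt's ID. The sketch below assumes the receipt object exposes an `async_passthrough_receipt_id` field (the `AsyncPassthroughReciept` type in this SDK); adapt the attribute name if it differs.

```python
from merge.client import Merge
from merge.resources.filestorage import DataPassthroughRequest, MethodEnum

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

# Step 1: enqueue the passthrough request; Merge runs it asynchronously.
receipt = client.filestorage.async_passthrough.create(
    request=DataPassthroughRequest(
        method=MethodEnum.GET,
        path="/scooters",
    ),
)

# Step 2: later, poll for the result using the receipt ID from step 1.
# The attribute name is assumed from the AsyncPassthroughReciept model.
result = client.filestorage.async_passthrough.retrieve(
    async_passthrough_receipt_id=receipt.async_passthrough_receipt_id,
)
```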
+ +## Filestorage AuditTrail +
client.filestorage.audit_trail.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Gets a list of audit trail events. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.audit_trail.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**end_date:** `typing.Optional[str]` — If included, will only include audit trail events that occurred before this time. + +
+
+ +
+
+ +**event_type:** `typing.Optional[str]` — If included, will only include events with the given event type. Possible values include: `CREATED_REMOTE_PRODUCTION_API_KEY`, `DELETED_REMOTE_PRODUCTION_API_KEY`, `CREATED_TEST_API_KEY`, `DELETED_TEST_API_KEY`, `REGENERATED_PRODUCTION_API_KEY`, `INVITED_USER`, `TWO_FACTOR_AUTH_ENABLED`, `TWO_FACTOR_AUTH_DISABLED`, `DELETED_LINKED_ACCOUNT`, `CREATED_DESTINATION`, `DELETED_DESTINATION`, `CHANGED_DESTINATION`, `CHANGED_SCOPES`, `CHANGED_PERSONAL_INFORMATION`, `CHANGED_ORGANIZATION_SETTINGS`, `ENABLED_INTEGRATION`, `DISABLED_INTEGRATION`, `ENABLED_CATEGORY`, `DISABLED_CATEGORY`, `CHANGED_PASSWORD`, `RESET_PASSWORD`, `ENABLED_REDACT_UNMAPPED_DATA_FOR_ORGANIZATION`, `ENABLED_REDACT_UNMAPPED_DATA_FOR_LINKED_ACCOUNT`, `DISABLED_REDACT_UNMAPPED_DATA_FOR_ORGANIZATION`, `DISABLED_REDACT_UNMAPPED_DATA_FOR_LINKED_ACCOUNT`, `CREATED_INTEGRATION_WIDE_FIELD_MAPPING`, `CREATED_LINKED_ACCOUNT_FIELD_MAPPING`, `CHANGED_INTEGRATION_WIDE_FIELD_MAPPING`, `CHANGED_LINKED_ACCOUNT_FIELD_MAPPING`, `DELETED_INTEGRATION_WIDE_FIELD_MAPPING`, `DELETED_LINKED_ACCOUNT_FIELD_MAPPING`, `FORCED_LINKED_ACCOUNT_RESYNC`, `MUTED_ISSUE`, `GENERATED_MAGIC_LINK`, `ENABLED_MERGE_WEBHOOK`, `DISABLED_MERGE_WEBHOOK`, `MERGE_WEBHOOK_TARGET_CHANGED` + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**start_date:** `typing.Optional[str]` — If included, will only include audit trail events that occurred after this time. + +
+
+ +
+
+ +**user_email:** `typing.Optional[str]` — If provided, this will return events associated with the specified user email. Please note that the email address reflects the user's email at the time of the event, and may not be their current email. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
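The filters documented above can be combined in one call. The sketch below is illustrative only: the event type is one of the listed options, and the email address and date strings are placeholder values.

```python
from merge.client import Merge

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

# Only CHANGED_SCOPES events triggered by one user inside a date window,
# 50 results per page. All filter values below are placeholders.
events_page = client.filestorage.audit_trail.list(
    event_type="CHANGED_SCOPES",
    user_email="jane.doe@example.com",
    start_date="2024-01-01T00:00:00Z",
    end_date="2024-06-30T23:59:59Z",
    page_size=50,
)
```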
+ +## Filestorage AvailableActions +
client.filestorage.available_actions.retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of models and actions available for an account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.available_actions.retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Filestorage Scopes +
client.filestorage.scopes.default_scopes_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get the default permissions for Merge Common Models and fields across all Linked Accounts of a given category. [Learn more](https://help.merge.dev/en/articles/8828211-common-model-and-field-scopes). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.scopes.default_scopes_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.filestorage.scopes.linked_account_scopes_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all available permissions for Merge Common Models and fields for a single Linked Account. [Learn more](https://help.merge.dev/en/articles/8828211-common-model-and-field-scopes). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.scopes.linked_account_scopes_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.filestorage.scopes.linked_account_scopes_create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update permissions for any Common Model or field for a single Linked Account. Any Scopes not set in this POST request will inherit the default Scopes. [Learn more](https://help.merge.dev/en/articles/8828211-common-model-and-field-scopes). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.filestorage import ( + IndividualCommonModelScopeDeserializerRequest, + ModelPermissionDeserializerRequest, +) + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.scopes.linked_account_scopes_create( + common_models=[ + IndividualCommonModelScopeDeserializerRequest( + model_name="Employee", + model_permissions={ + "READ": ModelPermissionDeserializerRequest( + is_enabled=True, + ), + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ), + }, + ), + IndividualCommonModelScopeDeserializerRequest( + model_name="Benefit", + model_permissions={ + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ) + }, + ), + ], +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**common_models:** `typing.Sequence[IndividualCommonModelScopeDeserializerRequest]` — The common models you want to update the scopes for + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Filestorage DeleteAccount +
client.filestorage.delete_account.delete() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a linked account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.delete_account.delete() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Filestorage Drives +
client.filestorage.drives.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Drive` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.drives.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**name:** `typing.Optional[str]` — If provided, will only return drives with this name. This performs an exact match. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
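Large drive lists are paged with `cursor` and `page_size`. The sketch below walks every page; it assumes the paginated response exposes `results` and `next` fields and that each drive carries `id` and `name`, matching Merge's paginated list schema.

```python
from merge.client import Merge

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

# Iterate over all drives, 100 per page.
# `results`, `next`, `id`, and `name` are assumed from Merge's API schema.
cursor = None
while True:
    page = client.filestorage.drives.list(page_size=100, cursor=cursor)
    for drive in page.results or []:
        print(drive.id, drive.name)
    cursor = page.next
    if not cursor:
        break
```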
client.filestorage.drives.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Drive` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.drives.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Filestorage FieldMapping +
client.filestorage.field_mapping.field_mappings_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all Field Mappings for this Linked Account. Field Mappings are mappings between third-party Remote Fields and user defined Merge fields. [Learn more](https://docs.merge.dev/supplemental-data/field-mappings/overview/). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.field_mapping.field_mappings_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.filestorage.field_mapping.field_mappings_create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create new Field Mappings that will be available after the next scheduled sync. This will cause the next sync for this Linked Account to sync **ALL** data from start. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.field_mapping.field_mappings_create( + target_field_name="example_target_field_name", + target_field_description="this is a example description of the target field", + remote_field_traversal_path=["example_remote_field"], + remote_method="GET", + remote_url_path="/example-url-path", + common_model_name="ExampleCommonModel", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**target_field_name:** `str` — The name of the target field you want this remote field to map to. + +
+
+ +
+
+ +**target_field_description:** `str` — The description of the target field you want this remote field to map to. + +
+
+ +
+
+ +**remote_field_traversal_path:** `typing.Sequence[typing.Any]` — The field traversal path of the remote field listed when you hit the GET /remote-fields endpoint. + +
+
+ +
+
+ +**remote_method:** `str` — The method of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**remote_url_path:** `str` — The path of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**common_model_name:** `str` — The name of the Common Model that the remote field corresponds to in a given category. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.filestorage.field_mapping.field_mappings_destroy(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Deletes Field Mappings for a Linked Account. All data related to this Field Mapping will be deleted and these changes will be reflected after the next scheduled sync. This will cause the next sync for this Linked Account to sync **ALL** data from start. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.field_mapping.field_mappings_destroy( + field_mapping_id="field_mapping_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**field_mapping_id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.filestorage.field_mapping.field_mappings_partial_update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create or update existing Field Mappings for a Linked Account. Changes will be reflected after the next scheduled sync. This will cause the next sync for this Linked Account to sync **ALL** data from start. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.field_mapping.field_mappings_partial_update( + field_mapping_id="field_mapping_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**field_mapping_id:** `str` + +
+
+ +
+
+ +**remote_field_traversal_path:** `typing.Optional[typing.Sequence[typing.Any]]` — The field traversal path of the remote field listed when you hit the GET /remote-fields endpoint. + +
+
+ +
+
+ +**remote_method:** `typing.Optional[str]` — The method of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**remote_url_path:** `typing.Optional[str]` — The path of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.filestorage.field_mapping.remote_fields_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all remote fields for a Linked Account. Remote fields are third-party fields that are accessible after initial sync if remote_data is enabled. You can use remote fields to override existing Merge fields or map a new Merge field. [Learn more](https://docs.merge.dev/supplemental-data/field-mappings/overview/). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.field_mapping.remote_fields_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**common_models:** `typing.Optional[str]` — A comma separated list of Common Model names. If included, will only return Remote Fields for those Common Models. + +
+
+ +
+
+ +**include_example_values:** `typing.Optional[str]` — If true, will include example values, where available, for remote fields in the 3rd party platform. These examples come from active data from your customers. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
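To limit the response to specific Common Models, pass `common_models` as a comma-separated string, optionally together with `include_example_values`. The model names below are placeholders.

```python
from merge.client import Merge

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

# Only Remote Fields for the File and Folder Common Models (placeholder names),
# with example values where the third-party platform provides them.
remote_fields = client.filestorage.field_mapping.remote_fields_retrieve(
    common_models="File,Folder",
    include_example_values="true",
)
```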
client.filestorage.field_mapping.target_fields_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all organization-wide Target Fields; this will not include any Linked Account-specific Target Fields. Organization-wide Target Fields are additional fields appended to the Merge Common Model for all Linked Accounts in a category. [Learn more](https://docs.merge.dev/supplemental-data/field-mappings/target-fields/). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.field_mapping.target_fields_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Filestorage Files +
client.filestorage.files.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `File` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.files.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**drive_id:** `typing.Optional[str]` — Specifying a drive id returns only the files in that drive. Specifying null returns only the files outside the top-level drive. + +
+
+ +
+
+ +**expand:** `typing.Optional[FilesListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**folder_id:** `typing.Optional[str]` — Specifying a folder id returns only the files in that folder. Specifying null returns only the files in the root directory. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**mime_type:** `typing.Optional[str]` — If provided, will only return files with these mime_types. Multiple values can be separated by commas. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**name:** `typing.Optional[str]` — If provided, will only return files with this name. This performs an exact match. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
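The list filters documented above compose; for example, restricting results to certain MIME types inside a single folder. The folder ID below is a placeholder.

```python
from merge.client import Merge

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

# PDFs and PNGs inside one folder, 25 per page. The folder ID is a placeholder.
files_page = client.filestorage.files.list(
    folder_id="c640b80b-fac9-409f-aa19-1f9221aec445",
    mime_type="application/pdf,image/png",
    page_size=25,
)
```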
client.filestorage.files.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `File` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.filestorage import FileRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.files.create( + model=FileRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `FileRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.filestorage.files.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `File` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.files.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[FilesRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.filestorage.files.download_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `File` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.files.download_retrieve( + id="string", + mime_type="string", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**mime_type:** `typing.Optional[str]` — If provided, specifies the export format of the file to be downloaded. For information on supported export formats, please refer to our export format help center article. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.filestorage.files.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `FileStorageFile` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.files.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Filestorage Folders +
client.filestorage.folders.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Folder` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.folders.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**drive_id:** `typing.Optional[str]` — If provided, will only return folders in this drive. + +
+
+ +
+
+ +**expand:** `typing.Optional[FoldersListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**name:** `typing.Optional[str]` — If provided, will only return folders with this name. This performs an exact match. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**parent_folder_id:** `typing.Optional[str]` — If provided, will only return folders in this parent folder. If null, will return folders in root directory. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.filestorage.folders.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `Folder` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.filestorage import FolderRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.folders.create( + model=FolderRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `FolderRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.filestorage.folders.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Folder` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.folders.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[FoldersRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.filestorage.folders.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `FileStorageFolder` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.folders.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Filestorage GenerateKey +
client.filestorage.generate_key.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a remote key. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.generate_key.create( + name="Remote Deployment Key 1", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**name:** `str` — The name of the remote key + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Filestorage Groups +
client.filestorage.groups.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Group` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.groups.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.filestorage.groups.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Group` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.groups.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Filestorage Issues +
client.filestorage.issues.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Gets issues. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.issues.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**account_token:** `typing.Optional[str]` + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**end_date:** `typing.Optional[str]` — If included, will only include issues whose most recent action occurred before this time. + +
+
+ +
+
+ +**end_user_organization_name:** `typing.Optional[str]` + +
+
+ +
+
+ +**first_incident_time_after:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose first incident time was after this datetime. + +
+
+ +
+
+ +**first_incident_time_before:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose first incident time was before this datetime. + +
+
+ +
+
+ +**include_muted:** `typing.Optional[str]` — If true, will include muted issues. + +
+
+ +
+
+ +**integration_name:** `typing.Optional[str]` + +
+
+ +
+
+ +**last_incident_time_after:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose last incident time was after this datetime. + +
+
+ +
+
+ +**last_incident_time_before:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose last incident time was before this datetime. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**start_date:** `typing.Optional[str]` — If included, will only include issues whose most recent action occurred after this time. + +
+
+ +
+
+ +**status:** `typing.Optional[IssuesListRequestStatus]` + +Status of the issue. Options: ('ONGOING', 'RESOLVED') + +- `ONGOING` - ONGOING +- `RESOLVED` - RESOLVED + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
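The incident-time filters above take plain `datetime` values, so a rolling window is easy to express; a sketch:

```python
import datetime as dt

from merge.client import Merge

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

# Issues whose first incident occurred in the last 7 days, muted ones included.
issues_page = client.filestorage.issues.list(
    first_incident_time_after=dt.datetime.now(dt.timezone.utc) - dt.timedelta(days=7),
    include_muted="true",
    page_size=20,
)
```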
client.filestorage.issues.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific issue. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.issues.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Filestorage LinkToken +
client.filestorage.link_token.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a link token to be used when linking a new end user. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.filestorage import CategoriesEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.link_token.create( + end_user_email_address="example@gmail.com", + end_user_organization_name="Test Organization", + end_user_origin_id="12345", + categories=[CategoriesEnum.HRIS, CategoriesEnum.ATS], +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**end_user_email_address:** `str` — Your end user's email address. This is purely for identification purposes - setting this value will not cause any emails to be sent. + +
+
+ +
+
+ +**end_user_organization_name:** `str` — Your end user's organization. + +
+
+ +
+
+ +**end_user_origin_id:** `str` — This unique identifier typically represents the ID for your end user in your product's database. This value must be distinct from other Linked Accounts' unique identifiers. + +
+
+ +
+
+ +**categories:** `typing.Sequence[CategoriesEnum]` — The integration categories to show in Merge Link. + +
+
+ +
+
+ +**integration:** `typing.Optional[str]` — The slug of a specific pre-selected integration for this linking flow token. For examples of slugs, see https://docs.merge.dev/guides/merge-link/single-integration/. + +
+
+ +
+
+ +**link_expiry_mins:** `typing.Optional[int]` — An integer number of minutes, between 30 and 720 (or up to 10080 for a Magic Link URL), for how long this token is valid. Defaults to 30. + +
+
+ +
+
+ +**should_create_magic_link_url:** `typing.Optional[bool]` — Whether to generate a Magic Link URL. Defaults to false. For more information on Magic Link, see https://merge.dev/blog/integrations-fast-say-hello-to-magic-link. + +
+
+ +
+
+ +**common_models:** `typing.Optional[typing.Sequence[CommonModelScopesBodyRequest]]` — An array of objects to specify the models and fields that will be disabled for a given Linked Account. Each object uses model_id, enabled_actions, and disabled_fields to specify the model, method, and fields that are scoped for a given Linked Account. + +
+
+ +
+
+ +**category_common_model_scopes:** `typing.Optional[ + typing.Dict[ + str, + typing.Optional[ + typing.Sequence[IndividualCommonModelScopeDeserializerRequest] + ], + ] +]` — When creating a Link Token, you can set permissions for Common Models that will apply to the account that is going to be linked. Any model or field not specified in link token payload will default to existing settings. + +
+
+ +
+
+ +**language:** `typing.Optional[str]` — The language code for the language to localize Merge Link to. + +
+
+ +
+
+ +**integration_specific_config:** `typing.Optional[typing.Dict[str, typing.Any]]` — A JSON object containing integration-specific configuration options. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
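To hand your end user a Magic Link URL instead of embedding Merge Link, enable `should_create_magic_link_url` and optionally extend `link_expiry_mins`. The sketch below assumes the response exposes a `magic_link_url` field, as in Merge's link token schema; the end-user details are placeholders.

```python
from merge.client import Merge
from merge.resources.filestorage import CategoriesEnum

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

link_token_response = client.filestorage.link_token.create(
    end_user_email_address="example@gmail.com",
    end_user_organization_name="Test Organization",
    end_user_origin_id="12345",
    categories=[CategoriesEnum.FILESTORAGE],
    should_create_magic_link_url=True,
    link_expiry_mins=10080,  # upper bound allowed for Magic Link URLs
)

# Attribute name assumed from Merge's link token response schema.
print(link_token_response.magic_link_url)
```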
+ +## Filestorage LinkedAccounts +
client.filestorage.linked_accounts.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +List linked accounts for your organization. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.linked_accounts.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**category:** `typing.Optional[LinkedAccountsListRequestCategory]` + +Options: ('hris', 'ats', 'accounting', 'ticketing', 'crm', 'mktg', 'filestorage') + +- `hris` - hris +- `ats` - ats +- `accounting` - accounting +- `ticketing` - ticketing +- `crm` - crm +- `mktg` - mktg +- `filestorage` - filestorage + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**end_user_email_address:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given email address. + +
+
+ +
+
+ +**end_user_organization_name:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given organization name. + +
+
+ +
+
+ +**end_user_origin_id:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given origin ID. + +
+
+ +
+
+ +**end_user_origin_ids:** `typing.Optional[str]` — Comma-separated list of EndUser origin IDs, making it possible to specify multiple EndUsers at once. + +
+
+ +
+
+ +**id:** `typing.Optional[str]` + +
+
+ +
+
+ +**ids:** `typing.Optional[str]` — Comma-separated list of LinkedAccount IDs, making it possible to specify multiple LinkedAccounts at once. + +
+
+ +
+
+ +**include_duplicates:** `typing.Optional[bool]` — If `true`, will include complete production duplicates of the account specified by the `id` query parameter in the response. `id` must be for a complete production linked account. + +
+
+ +
+
+ +**integration_name:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given integration name. + +
+
+ +
+
+ +**is_test_account:** `typing.Optional[str]` — If included, will only include test linked accounts. If not included, will only include non-test linked accounts. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**status:** `typing.Optional[str]` — Filter by status. Options: `COMPLETE`, `INCOMPLETE`, `RELINK_NEEDED` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
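A common pattern is narrowing the linked-account list with the string filters above, for example by sync status and integration. The values below are placeholders.

```python
from merge.client import Merge

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

# Only fully linked accounts for one integration; filter values are placeholders.
linked_accounts = client.filestorage.linked_accounts.list(
    status="COMPLETE",
    integration_name="Google Drive",
    page_size=50,
)
```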
+ +## Filestorage Passthrough +
client.filestorage.passthrough.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Pull data from an endpoint not currently supported by Merge. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.filestorage import DataPassthroughRequest, MethodEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request:** `DataPassthroughRequest` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
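Beyond the GET shown above, a passthrough request can also carry a body and custom headers for write calls. The sketch below assumes `DataPassthroughRequest` accepts `data` (a JSON-encoded string) and `headers` fields, matching Merge's passthrough request schema; the path, payload, and header are placeholders.

```python
import json

from merge.client import Merge
from merge.resources.filestorage import DataPassthroughRequest, MethodEnum

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

# `data` and `headers` are assumed from Merge's passthrough request schema;
# the endpoint path, payload, and header are placeholders.
response = client.filestorage.passthrough.create(
    request=DataPassthroughRequest(
        method=MethodEnum.POST,
        path="/scooters",
        data=json.dumps({"name": "Scooter 1"}),
        headers={"EXTRA-HEADER": "value"},
    ),
)
```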
+ +## Filestorage RegenerateKey +
client.filestorage.regenerate_key.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Exchange remote keys. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.regenerate_key.create( + name="Remote Deployment Key 1", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**name:** `str` — The name of the remote key + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Filestorage SyncStatus +
client.filestorage.sync_status.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get syncing status. Possible values: `DISABLED`, `DONE`, `FAILED`, `PARTIALLY_SYNCED`, `PAUSED`, `SYNCING`. Learn more about sync status in our [Help Center](https://help.merge.dev/en/articles/8184193-merge-sync-statuses). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.sync_status.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
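To act on the result, for example before deciding whether to force a re-sync with the endpoint below, you can inspect each entry. The sketch assumes the paginated response exposes `results` and that each sync status carries `model_name` and `status` fields, as in Merge's sync status schema.

```python
from merge.client import Merge

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

# `results`, `model_name`, and `status` are assumed from Merge's sync status schema.
sync_statuses = client.filestorage.sync_status.list(page_size=100)
for sync_status in sync_statuses.results or []:
    print(f"{sync_status.model_name}: {sync_status.status}")
```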
+ +## Filestorage ForceResync +
client.filestorage.force_resync.sync_status_resync_create() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Force re-sync of all models. This is available for all organizations via the dashboard. Force re-sync is also available programmatically via API for monthly, quarterly, and highest sync frequency customers on the Launch, Professional, or Enterprise plans. Doing so will consume a sync credit for the relevant linked account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.force_resync.sync_status_resync_create() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Filestorage Users +
client.filestorage.users.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `User` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.users.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**is_me:** `typing.Optional[str]` — If provided, will only return the user object for the requestor. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
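To fetch only the user tied to the linked account's credentials, pass the `is_me` flag; it is typed as an optional string above, so the value is passed as `"true"`.

```python
from merge.client import Merge

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

# `is_me` is an optional string parameter, so the flag is passed as "true".
me_page = client.filestorage.users.list(is_me="true")
```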
client.filestorage.users.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `User` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.users.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Filestorage WebhookReceivers +
client.filestorage.webhook_receivers.list() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `WebhookReceiver` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.webhook_receivers.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.filestorage.webhook_receivers.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `WebhookReceiver` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.filestorage.webhook_receivers.create( + event="event", + is_active=True, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**event:** `str` + +
+
+ +
+
+ +**is_active:** `bool` + +
+
+ +
+
+ +**key:** `typing.Optional[str]` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats AccountDetails +
client.ats.account_details.retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get details for a linked account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.account_details.retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats AccountToken +
client.ats.account_token.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns the account token for the end user with the provided public token. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.account_token.retrieve( + public_token="public_token", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**public_token:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats Activities +
client.ats.activities.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Activity` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.activities.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["user"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[ActivitiesListRequestRemoteFields]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[ActivitiesListRequestShowEnumOrigins]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**user_id:** `typing.Optional[str]` — If provided, will only return activities done by this user. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
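+ +A minimal sketch of `client.ats.activities.list` combining the documented filters; `USER_ID` is a placeholder. +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) + +# Expand the related user and only return activities done by one user. +client.ats.activities.list( + expand="user", + user_id="USER_ID", + page_size=25, +) + +``` +
+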
client.ats.activities.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates an `Activity` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ats import ActivityRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.activities.create( + model=ActivityRequest(), + remote_user_id="remote_user_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `ActivityRequest` + +
+
+ +
+
+ +**remote_user_id:** `str` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.activities.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `Activity` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.activities.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["user"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[ActivitiesRetrieveRequestRemoteFields]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[ActivitiesRetrieveRequestShowEnumOrigins]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.activities.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Activity` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.activities.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats Applications +
client.ats.applications.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Application` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.applications.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**candidate_id:** `typing.Optional[str]` — If provided, will only return applications for this candidate. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**credited_to_id:** `typing.Optional[str]` — If provided, will only return applications credited to this user. + +
+
+ +
+
+ +**current_stage_id:** `typing.Optional[str]` — If provided, will only return applications at this interview stage. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[ApplicationsListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**job_id:** `typing.Optional[str]` — If provided, will only return applications for this job. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**reject_reason_id:** `typing.Optional[str]` — If provided, will only return applications with this reject reason. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**source:** `typing.Optional[str]` — If provided, will only return applications with this source. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
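+ +A minimal sketch of `client.ats.applications.list` narrowed by job and candidate; the ids are placeholders. +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) + +# Applications for one job submitted by one candidate. +client.ats.applications.list( + job_id="JOB_ID", + candidate_id="CANDIDATE_ID", + page_size=100, +) + +``` +
+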
client.ats.applications.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates an `Application` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ats import ApplicationRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.applications.create( + model=ApplicationRequest(), + remote_user_id="remote_user_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `ApplicationRequest` + +
+
+ +
+
+ +**remote_user_id:** `str` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.applications.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `Application` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.applications.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[ApplicationsRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.applications.change_stage_create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Updates the `current_stage` field of an `Application` object. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.applications.change_stage_create( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**job_interview_stage:** `typing.Optional[str]` — The interview stage to move the application to. + +
+
+ +
+
+ +**remote_user_id:** `typing.Optional[str]` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
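+ +A minimal sketch of `client.ats.applications.change_stage_create` using the optional stage and remote user parameters; all ids are placeholders. +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) + +# Move the application to a different interview stage on behalf of a remote user. +client.ats.applications.change_stage_create( + id="APPLICATION_ID", + job_interview_stage="JOB_INTERVIEW_STAGE_ID", + remote_user_id="REMOTE_USER_ID", +) + +``` +
+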
client.ats.applications.meta_post_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Application` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.applications.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**application_remote_template_id:** `typing.Optional[str]` — The template ID associated with the nested application in the request. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats AsyncPassthrough +
client.ats.async_passthrough.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Asynchronously pull data from an endpoint not currently supported by Merge. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ats import DataPassthroughRequest, MethodEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.async_passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request:** `DataPassthroughRequest` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.async_passthrough.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Retrieves data from an earlier async-passthrough POST request. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.async_passthrough.retrieve( + async_passthrough_receipt_id="async_passthrough_receipt_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**async_passthrough_receipt_id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
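+ +A minimal sketch chaining `create` and `retrieve`; it assumes the create response exposes the receipt id as `async_passthrough_receipt_id`, so verify the attribute name on the returned object. +
+
+ +```python +from merge.client import Merge +from merge.resources.ats import DataPassthroughRequest, MethodEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) + +# Kick off the async request... +receipt = client.ats.async_passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), +) + +# ...then fetch the result (receipt attribute name assumed, see note above). +result = client.ats.async_passthrough.retrieve( + async_passthrough_receipt_id=receipt.async_passthrough_receipt_id, +) + +``` +
+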
+ +## Ats Attachments +
client.ats.attachments.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Attachment` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.attachments.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**candidate_id:** `typing.Optional[str]` — If provided, will only return attachments for this candidate. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["candidate"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["attachment_type"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["attachment_type"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
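+ +A minimal sketch of `client.ats.attachments.list` using the literal-typed parameters above; `CANDIDATE_ID` is a placeholder. +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) + +# Attachments for one candidate, with the candidate expanded and raw enum origins. +client.ats.attachments.list( + candidate_id="CANDIDATE_ID", + expand="candidate", + show_enum_origins="attachment_type", +) + +``` +
+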
client.ats.attachments.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates an `Attachment` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ats import AttachmentRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.attachments.create( + model=AttachmentRequest(), + remote_user_id="remote_user_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `AttachmentRequest` + +
+
+ +
+
+ +**remote_user_id:** `str` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.attachments.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `Attachment` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.attachments.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["candidate"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["attachment_type"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["attachment_type"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.attachments.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Attachment` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.attachments.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats AuditTrail +
client.ats.audit_trail.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Gets a list of audit trail events. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.audit_trail.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**end_date:** `typing.Optional[str]` — If included, will only include audit trail events that occurred before this time. + +
+
+ +
+
+ +**event_type:** `typing.Optional[str]` — If included, will only include events with the given event type. Possible values include: `CREATED_REMOTE_PRODUCTION_API_KEY`, `DELETED_REMOTE_PRODUCTION_API_KEY`, `CREATED_TEST_API_KEY`, `DELETED_TEST_API_KEY`, `REGENERATED_PRODUCTION_API_KEY`, `INVITED_USER`, `TWO_FACTOR_AUTH_ENABLED`, `TWO_FACTOR_AUTH_DISABLED`, `DELETED_LINKED_ACCOUNT`, `CREATED_DESTINATION`, `DELETED_DESTINATION`, `CHANGED_DESTINATION`, `CHANGED_SCOPES`, `CHANGED_PERSONAL_INFORMATION`, `CHANGED_ORGANIZATION_SETTINGS`, `ENABLED_INTEGRATION`, `DISABLED_INTEGRATION`, `ENABLED_CATEGORY`, `DISABLED_CATEGORY`, `CHANGED_PASSWORD`, `RESET_PASSWORD`, `ENABLED_REDACT_UNMAPPED_DATA_FOR_ORGANIZATION`, `ENABLED_REDACT_UNMAPPED_DATA_FOR_LINKED_ACCOUNT`, `DISABLED_REDACT_UNMAPPED_DATA_FOR_ORGANIZATION`, `DISABLED_REDACT_UNMAPPED_DATA_FOR_LINKED_ACCOUNT`, `CREATED_INTEGRATION_WIDE_FIELD_MAPPING`, `CREATED_LINKED_ACCOUNT_FIELD_MAPPING`, `CHANGED_INTEGRATION_WIDE_FIELD_MAPPING`, `CHANGED_LINKED_ACCOUNT_FIELD_MAPPING`, `DELETED_INTEGRATION_WIDE_FIELD_MAPPING`, `DELETED_LINKED_ACCOUNT_FIELD_MAPPING`, `FORCED_LINKED_ACCOUNT_RESYNC`, `MUTED_ISSUE`, `GENERATED_MAGIC_LINK`, `ENABLED_MERGE_WEBHOOK`, `DISABLED_MERGE_WEBHOOK`, `MERGE_WEBHOOK_TARGET_CHANGED` + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**start_date:** `typing.Optional[str]` — If included, will only include audit trail events that occurred after this time. + +
+
+ +
+
+ +**user_email:** `typing.Optional[str]` — If provided, this will return events associated with the specified user email. Please note that the email address reflects the user's email at the time of the event, and may not be their current email. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
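+ +A minimal sketch of `client.ats.audit_trail.list`; the ISO 8601 string for `start_date` is an assumption since the parameter is typed as a plain string. +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) + +# Scope-change events since the start of 2024, 25 events per page. +client.ats.audit_trail.list( + event_type="CHANGED_SCOPES", + start_date="2024-01-01T00:00:00Z", + page_size=25, +) + +``` +
+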
+ +## Ats AvailableActions +
client.ats.available_actions.retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of models and actions available for an account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.available_actions.retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats Candidates +
client.ats.candidates.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Candidate` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.candidates.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**email_addresses:** `typing.Optional[str]` — If provided, will only return candidates with these email addresses; multiple addresses can be separated by commas. + +
+
+ +
+
+ +**expand:** `typing.Optional[CandidatesListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**first_name:** `typing.Optional[str]` — If provided, will only return candidates with this first name. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**last_name:** `typing.Optional[str]` — If provided, will only return candidates with this last name. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**tags:** `typing.Optional[str]` — If provided, will only return candidates with these tags; multiple tags can be separated by commas. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
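+ +A minimal sketch of `client.ats.candidates.list` using the comma-separated filters; the addresses and tags are placeholders. +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) + +# Filter by email address and tag (comma-separated lists per the parameters above). +client.ats.candidates.list( + email_addresses="ada@example.com,grace@example.com", + tags="engineering,referral", + page_size=50, +) + +``` +
+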
client.ats.candidates.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `Candidate` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ats import CandidateRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.candidates.create( + model=CandidateRequest(), + remote_user_id="remote_user_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `CandidateRequest` + +
+
+ +
+
+ +**remote_user_id:** `str` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.candidates.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Candidate` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.candidates.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[CandidatesRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.candidates.partial_update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Updates a `Candidate` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ats import PatchedCandidateRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.candidates.partial_update( + id="id", + model=PatchedCandidateRequest(), + remote_user_id="remote_user_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**model:** `PatchedCandidateRequest` + +
+
+ +
+
+ +**remote_user_id:** `str` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.candidates.ignore_create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Ignores a specific row based on the `model_id` in the URL. These records will have their properties set to null, and will not be updated in future syncs. The "reason" and "message" fields in the request body will be stored for audit purposes. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ats import ReasonEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.candidates.ignore_create( + model_id="model_id", + reason=ReasonEnum.GENERAL_CUSTOMER_REQUEST, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model_id:** `str` + +
+
+ +
+
+ +**reason:** `ReasonEnum` + +
+
+ +
+
+ +**message:** `typing.Optional[str]` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.candidates.meta_patch_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Candidate` PATCHs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.candidates.meta_patch_retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.candidates.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Candidate` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.candidates.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats Scopes +
client.ats.scopes.default_scopes_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get the default permissions for Merge Common Models and fields across all Linked Accounts of a given category. [Learn more](https://help.merge.dev/en/articles/8828211-common-model-and-field-scopes). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.scopes.default_scopes_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.scopes.linked_account_scopes_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all available permissions for Merge Common Models and fields for a single Linked Account. [Learn more](https://help.merge.dev/en/articles/8828211-common-model-and-field-scopes). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.scopes.linked_account_scopes_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.scopes.linked_account_scopes_create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update permissions for any Common Model or field for a single Linked Account. Any Scopes not set in this POST request will inherit the default Scopes. [Learn more](https://help.merge.dev/en/articles/8828211-common-model-and-field-scopes). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ats import ( + IndividualCommonModelScopeDeserializerRequest, + ModelPermissionDeserializerRequest, +) + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.scopes.linked_account_scopes_create( + common_models=[ + IndividualCommonModelScopeDeserializerRequest( + model_name="Employee", + model_permissions={ + "READ": ModelPermissionDeserializerRequest( + is_enabled=True, + ), + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ), + }, + ), + IndividualCommonModelScopeDeserializerRequest( + model_name="Benefit", + model_permissions={ + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ) + }, + ), + ], +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**common_models:** `typing.Sequence[IndividualCommonModelScopeDeserializerRequest]` — The common models you want to update the scopes for + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats DeleteAccount +
client.ats.delete_account.delete() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a linked account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.delete_account.delete() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats Departments +
client.ats.departments.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Department` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.departments.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.departments.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Department` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.departments.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats Eeocs +
client.ats.eeocs.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `EEOC` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.eeocs.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**candidate_id:** `typing.Optional[str]` — If provided, will only return EEOC info for this candidate. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["candidate"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[EeocsListRequestRemoteFields]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[EeocsListRequestShowEnumOrigins]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.eeocs.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `EEOC` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.eeocs.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["candidate"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[EeocsRetrieveRequestRemoteFields]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[EeocsRetrieveRequestShowEnumOrigins]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats FieldMapping +
client.ats.field_mapping.field_mappings_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all Field Mappings for this Linked Account. Field Mappings are mappings between third-party Remote Fields and user defined Merge fields. [Learn more](https://docs.merge.dev/supplemental-data/field-mappings/overview/). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.field_mapping.field_mappings_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.field_mapping.field_mappings_create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create new Field Mappings that will be available after the next scheduled sync. This will cause the next sync for this Linked Account to sync **ALL** data from start. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.field_mapping.field_mappings_create( + target_field_name="example_target_field_name", + target_field_description="this is a example description of the target field", + remote_field_traversal_path=["example_remote_field"], + remote_method="GET", + remote_url_path="/example-url-path", + common_model_name="ExampleCommonModel", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**target_field_name:** `str` — The name of the target field you want this remote field to map to. + +
+
+ +
+
+ +**target_field_description:** `str` — The description of the target field you want this remote field to map to. + +
+
+ +
+
+ +**remote_field_traversal_path:** `typing.Sequence[typing.Any]` — The field traversal path of the remote field listed when you hit the GET /remote-fields endpoint. + +
+
+ +
+
+ +**remote_method:** `str` — The method of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**remote_url_path:** `str` — The path of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**common_model_name:** `str` — The name of the Common Model that the remote field corresponds to in a given category. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.field_mapping.field_mappings_destroy(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Deletes Field Mappings for a Linked Account. All data related to this Field Mapping will be deleted and these changes will be reflected after the next scheduled sync. This will cause the next sync for this Linked Account to sync **ALL** data from start. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.field_mapping.field_mappings_destroy( + field_mapping_id="field_mapping_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**field_mapping_id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.field_mapping.field_mappings_partial_update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create or update existing Field Mappings for a Linked Account. Changes will be reflected after the next scheduled sync. This will cause the next sync for this Linked Account to sync **ALL** data from start. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.field_mapping.field_mappings_partial_update( + field_mapping_id="field_mapping_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**field_mapping_id:** `str` + +
+
+ +
+
+ +**remote_field_traversal_path:** `typing.Optional[typing.Sequence[typing.Any]]` — The field traversal path of the remote field listed when you hit the GET /remote-fields endpoint. + +
+
+ +
+
+ +**remote_method:** `typing.Optional[str]` — The method of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**remote_url_path:** `typing.Optional[str]` — The path of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
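+ +A minimal sketch of `client.ats.field_mapping.field_mappings_partial_update`; the remote-field values mirror the create example above and are placeholders. +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) + +# Point an existing Field Mapping at a different remote field. +client.ats.field_mapping.field_mappings_partial_update( + field_mapping_id="field_mapping_id", + remote_field_traversal_path=["example_remote_field"], + remote_method="GET", + remote_url_path="/example-url-path", +) + +``` +
+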
client.ats.field_mapping.remote_fields_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all remote fields for a Linked Account. Remote fields are third-party fields that are accessible after initial sync if remote_data is enabled. You can use remote fields to override existing Merge fields or map a new Merge field. [Learn more](https://docs.merge.dev/supplemental-data/field-mappings/overview/). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.field_mapping.remote_fields_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**common_models:** `typing.Optional[str]` — A comma separated list of Common Model names. If included, will only return Remote Fields for those Common Models. + +
+
+ +
+
+ +**include_example_values:** `typing.Optional[str]` — If true, will include example values, where available, for remote fields in the 3rd party platform. These examples come from active data from your customers. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
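+ +A minimal sketch of `client.ats.field_mapping.remote_fields_retrieve`; passing the string "true" for `include_example_values` is an assumption based on its string typing above. +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) + +# Remote fields for two ATS Common Models, with example values where available. +client.ats.field_mapping.remote_fields_retrieve( + common_models="Candidate,Application", + include_example_values="true", +) + +``` +
+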
client.ats.field_mapping.target_fields_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all organization-wide Target Fields; this will not include any Linked Account-specific Target Fields. Organization-wide Target Fields are additional fields appended to the Merge Common Model for all Linked Accounts in a category. [Learn more](https://docs.merge.dev/supplemental-data/field-mappings/target-fields/). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.field_mapping.target_fields_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats GenerateKey +
client.ats.generate_key.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a remote key. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.generate_key.create( + name="Remote Deployment Key 1", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**name:** `str` — The name of the remote key + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats Interviews +
client.ats.interviews.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `ScheduledInterview` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.interviews.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**application_id:** `typing.Optional[str]` — If provided, will only return interviews for this application. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[InterviewsListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**job_id:** `typing.Optional[str]` — If provided, will only return interviews organized for this job. + +
+
+ +
+
+ +**job_interview_stage_id:** `typing.Optional[str]` — If provided, will only return interviews at this stage. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**organizer_id:** `typing.Optional[str]` — If provided, will only return interviews organized by this user. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["status"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["status"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
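+ +A minimal sketch of `client.ats.interviews.list` narrowed by job and organizer; the ids are placeholders. +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) + +# Interviews for one job, organized by one user, with raw status enum origins. +client.ats.interviews.list( + job_id="JOB_ID", + organizer_id="USER_ID", + show_enum_origins="status", +) + +``` +
+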
client.ats.interviews.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `ScheduledInterview` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ats import ScheduledInterviewRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.interviews.create( + model=ScheduledInterviewRequest(), + remote_user_id="remote_user_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `ScheduledInterviewRequest` + +
+
+ +
+
+ +**remote_user_id:** `str` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.interviews.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `ScheduledInterview` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.interviews.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[InterviewsRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["status"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["status"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.interviews.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `ScheduledInterview` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.interviews.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats Issues +
client.ats.issues.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Gets issues. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.issues.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**account_token:** `typing.Optional[str]` + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**end_date:** `typing.Optional[str]` — If included, will only include issues whose most recent action occurred before this time. + +
+
+ +
+
+ +**end_user_organization_name:** `typing.Optional[str]` + +
+
+ +
+
+ +**first_incident_time_after:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose first incident time was after this datetime. + +
+
+ +
+
+ +**first_incident_time_before:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose first incident time was before this datetime. + +
+
+ +
+
+ +**include_muted:** `typing.Optional[str]` — If true, will include muted issues. + +
+
+ +
+
+ +**integration_name:** `typing.Optional[str]` + +
+
+ +
+
+ +**last_incident_time_after:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose last incident time was after this datetime. + +
+
+ +
+
+ +**last_incident_time_before:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose last incident time was before this datetime. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**start_date:** `typing.Optional[str]` — If included, will only include issues whose most recent action occurred after this time. + +
+
+ +
+
+ +**status:** `typing.Optional[IssuesListRequestStatus]` + +Status of the issue. Options: ('ONGOING', 'RESOLVED') + +- `ONGOING` - ONGOING +- `RESOLVED` - RESOLVED + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
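+ +A minimal sketch of `client.ats.issues.list` filtered by incident time; passing the string "true" for `include_muted` follows its string typing above. +
+
+ +```python +import datetime as dt + +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) + +# Issues whose first incident happened in 2024 or later, including muted ones. +client.ats.issues.list( + first_incident_time_after=dt.datetime(2024, 1, 1, tzinfo=dt.timezone.utc), + include_muted="true", + page_size=25, +) + +``` +
+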
client.ats.issues.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific issue. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.issues.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats JobInterviewStages +
client.ats.job_interview_stages.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `JobInterviewStage` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.job_interview_stages.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["job"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**job_id:** `typing.Optional[str]` — If provided, will only return interview stages for this job. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.job_interview_stages.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `JobInterviewStage` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.job_interview_stages.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["job"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats JobPostings +
client.ats.job_postings.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `JobPosting` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.job_postings.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["job"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**status:** `typing.Optional[JobPostingsListRequestStatus]` + +If provided, will only return Job Postings with this status. Options: ('PUBLISHED', 'CLOSED', 'DRAFT', 'INTERNAL', 'PENDING') + +- `PUBLISHED` - PUBLISHED +- `CLOSED` - CLOSED +- `DRAFT` - DRAFT +- `INTERNAL` - INTERNAL +- `PENDING` - PENDING + +
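+For example, to fetch only published postings, a minimal sketch (assuming `JobPostingsListRequestStatus` is importable from `merge.resources.ats`, mirroring the other request enums in this reference):
+
+```python
+from merge.client import Merge
+from merge.resources.ats import JobPostingsListRequestStatus  # assumed import path
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+# Only postings that are currently published.
+published_postings = client.ats.job_postings.list(
+    status=JobPostingsListRequestStatus.PUBLISHED,
+)
+
+```
+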
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.job_postings.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `JobPosting` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.job_postings.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["job"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats Jobs +
client.ats.jobs.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Job` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.jobs.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**code:** `typing.Optional[str]` — If provided, will only return jobs with this code. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[JobsListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**offices:** `typing.Optional[str]` — If provided, will only return jobs for this office; multiple offices can be separated by commas. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["status"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["status"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**status:** `typing.Optional[JobsListRequestStatus]` + +If provided, will only return jobs with this status. Options: ('OPEN', 'CLOSED', 'DRAFT', 'ARCHIVED', 'PENDING') + +- `OPEN` - OPEN +- `CLOSED` - CLOSED +- `DRAFT` - DRAFT +- `ARCHIVED` - ARCHIVED +- `PENDING` - PENDING + +
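+For example, to list open jobs while asking for the provider's original status values, a minimal sketch (assuming `JobsListRequestStatus` is importable from `merge.resources.ats`):
+
+```python
+from merge.client import Merge
+from merge.resources.ats import JobsListRequestStatus  # assumed import path
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+open_jobs = client.ats.jobs.list(
+    status=JobsListRequestStatus.OPEN,
+    # Return the third party's original enum values for `status` instead of Merge's normalized ones.
+    show_enum_origins="status",
+)
+
+```
+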
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.jobs.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Job` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.jobs.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[JobsRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["status"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["status"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.jobs.screening_questions_list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `ScreeningQuestion` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.jobs.screening_questions_list( + job_id="job_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**job_id:** `str` + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[JobsScreeningQuestionsListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats LinkToken +
client.ats.link_token.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a link token to be used when linking a new end user. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ats import CategoriesEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.link_token.create( + end_user_email_address="example@gmail.com", + end_user_organization_name="Test Organization", + end_user_origin_id="12345", + categories=[CategoriesEnum.HRIS, CategoriesEnum.ATS], +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**end_user_email_address:** `str` — Your end user's email address. This is purely for identification purposes - setting this value will not cause any emails to be sent. + +
+
+ +
+
+ +**end_user_organization_name:** `str` — Your end user's organization. + +
+
+ +
+
+ +**end_user_origin_id:** `str` — This unique identifier typically represents the ID for your end user in your product's database. This value must be distinct from other Linked Accounts' unique identifiers. + +
+
+ +
+
+ +**categories:** `typing.Sequence[CategoriesEnum]` — The integration categories to show in Merge Link. + +
+
+ +
+
+ +**integration:** `typing.Optional[str]` — The slug of a specific pre-selected integration for this linking flow token. For examples of slugs, see https://docs.merge.dev/guides/merge-link/single-integration/. + +
+
+ +
+
+ +**link_expiry_mins:** `typing.Optional[int]` — An integer number of minutes for how long this token is valid, between 30 and 720 (or up to 10080 when generating a Magic Link URL). Defaults to 30. + +
+
+ +
+
+ +**should_create_magic_link_url:** `typing.Optional[bool]` — Whether to generate a Magic Link URL. Defaults to false. For more information on Magic Link, see https://merge.dev/blog/integrations-fast-say-hello-to-magic-link. + +
+
+ +
+
+ +**common_models:** `typing.Optional[typing.Sequence[CommonModelScopesBodyRequest]]` — An array of objects to specify the models and fields that will be disabled for a given Linked Account. Each object uses model_id, enabled_actions, and disabled_fields to specify the model, method, and fields that are scoped for a given Linked Account. + +
+
+ +
+
+ +**category_common_model_scopes:** `typing.Optional[ + typing.Dict[ + str, + typing.Optional[ + typing.Sequence[IndividualCommonModelScopeDeserializerRequest] + ], + ] +]` — When creating a Link Token, you can set permissions for Common Models that will apply to the account that is going to be linked. Any model or field not specified in link token payload will default to existing settings. + +
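+A minimal sketch of narrowing scopes at link-token creation time (assuming the ATS package exposes the same scope request models as the HRIS example later in this reference, i.e. `IndividualCommonModelScopeDeserializerRequest` and `ModelPermissionDeserializerRequest`):
+
+```python
+from merge.client import Merge
+from merge.resources.ats import (  # assumed imports, mirroring the HRIS scope models
+    CategoriesEnum,
+    IndividualCommonModelScopeDeserializerRequest,
+    ModelPermissionDeserializerRequest,
+)
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+client.ats.link_token.create(
+    end_user_email_address="example@gmail.com",
+    end_user_organization_name="Test Organization",
+    end_user_origin_id="12345",
+    categories=[CategoriesEnum.ATS],
+    category_common_model_scopes={
+        # Keys are category slugs; each value lists the Common Model scopes to apply for that category.
+        "ats": [
+            IndividualCommonModelScopeDeserializerRequest(
+                model_name="Job",
+                model_permissions={
+                    "READ": ModelPermissionDeserializerRequest(is_enabled=True),
+                },
+            )
+        ],
+    },
+)
+
+```
+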
+
+ +
+
+ +**language:** `typing.Optional[str]` — The language code for the language to localize Merge Link to. + +
+
+ +
+
+ +**integration_specific_config:** `typing.Optional[typing.Dict[str, typing.Any]]` — A JSON object containing integration-specific configuration options. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats LinkedAccounts +
client.ats.linked_accounts.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +List linked accounts for your organization. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.linked_accounts.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**category:** `typing.Optional[LinkedAccountsListRequestCategory]` + +Options: `accounting`, `ats`, `crm`, `filestorage`, `hris`, `mktg`, `ticketing` + +- `hris` - hris +- `ats` - ats +- `accounting` - accounting +- `ticketing` - ticketing +- `crm` - crm +- `mktg` - mktg +- `filestorage` - filestorage + +
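+For example, to list only completed ATS linked accounts, a minimal sketch (assuming `LinkedAccountsListRequestCategory` is importable from `merge.resources.ats`):
+
+```python
+from merge.client import Merge
+from merge.resources.ats import LinkedAccountsListRequestCategory  # assumed import path
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+ats_accounts = client.ats.linked_accounts.list(
+    category=LinkedAccountsListRequestCategory.ATS,
+    status="COMPLETE",
+)
+
+```
+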
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**end_user_email_address:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given email address. + +
+
+ +
+
+ +**end_user_organization_name:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given organization name. + +
+
+ +
+
+ +**end_user_origin_id:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given origin ID. + +
+
+ +
+
+ +**end_user_origin_ids:** `typing.Optional[str]` — Comma-separated list of EndUser origin IDs, making it possible to specify multiple EndUsers at once. + +
+
+ +
+
+ +**id:** `typing.Optional[str]` + +
+
+ +
+
+ +**ids:** `typing.Optional[str]` — Comma-separated list of LinkedAccount IDs, making it possible to specify multiple LinkedAccounts at once. + +
+
+ +
+
+ +**include_duplicates:** `typing.Optional[bool]` — If `true`, will include complete production duplicates of the account specified by the `id` query parameter in the response. `id` must be for a complete production linked account. + +
+
+ +
+
+ +**integration_name:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given integration name. + +
+
+ +
+
+ +**is_test_account:** `typing.Optional[str]` — If included, will only include test linked accounts. If not included, will only include non-test linked accounts. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**status:** `typing.Optional[str]` — Filter by status. Options: `COMPLETE`, `INCOMPLETE`, `RELINK_NEEDED` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats Offers +
client.ats.offers.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Offer` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.offers.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**application_id:** `typing.Optional[str]` — If provided, will only return offers for this application. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**creator_id:** `typing.Optional[str]` — If provided, will only return offers created by this user. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[OffersListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["status"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["status"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.offers.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `Offer` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.offers.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[OffersRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["status"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["status"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats Offices +
client.ats.offices.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Office` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.offices.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.offices.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `Office` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.offices.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats Passthrough +
client.ats.passthrough.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Pull data from an endpoint not currently supported by Merge. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ats import DataPassthroughRequest, MethodEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request:** `DataPassthroughRequest` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats RegenerateKey +
client.ats.regenerate_key.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Exchange remote keys. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.regenerate_key.create( + name="Remote Deployment Key 1", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**name:** `str` — The name of the remote key + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats RejectReasons +
client.ats.reject_reasons.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `RejectReason` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.reject_reasons.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.reject_reasons.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `RejectReason` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.reject_reasons.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats Scorecards +
client.ats.scorecards.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Scorecard` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.scorecards.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**application_id:** `typing.Optional[str]` — If provided, will only return scorecards for this application. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[ScorecardsListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**interview_id:** `typing.Optional[str]` — If provided, will only return scorecards for this interview. + +
+
+ +
+
+ +**interviewer_id:** `typing.Optional[str]` — If provided, will only return scorecards for this interviewer. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["overall_recommendation"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["overall_recommendation"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.scorecards.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Scorecard` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.scorecards.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[ScorecardsRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["overall_recommendation"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["overall_recommendation"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats SyncStatus +
client.ats.sync_status.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get syncing status. Possible values: `DISABLED`, `DONE`, `FAILED`, `PARTIALLY_SYNCED`, `PAUSED`, `SYNCING`. Learn more about sync status in our [Help Center](https://help.merge.dev/en/articles/8184193-merge-sync-statuses). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.sync_status.list() + +``` +
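+To inspect the result, a minimal sketch (field names such as `model_name` and `status` are assumed from the SyncStatus model, and `results` is assumed on the paginated response):
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+statuses = client.ats.sync_status.list()
+for sync in statuses.results or []:
+    # Each entry is assumed to report the Common Model name and its current sync state.
+    print(sync.model_name, sync.status)
+
+```
+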
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats ForceResync +
client.ats.force_resync.sync_status_resync_create() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Force re-sync of all models. This is available for all organizations via the dashboard. Force re-sync is also available programmatically via API for monthly, quarterly, and highest sync frequency customers on the Launch, Professional, or Enterprise plans. Doing so will consume a sync credit for the relevant linked account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.force_resync.sync_status_resync_create() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats Tags +
client.ats.tags.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Tag` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.tags.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats Users +
client.ats.users.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `RemoteUser` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.users.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**email:** `typing.Optional[str]` — If provided, will only return remote users with the given email address. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["access_role"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["access_role"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.users.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `RemoteUser` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.users.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["access_role"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["access_role"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ats WebhookReceivers +
client.ats.webhook_receivers.list() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `WebhookReceiver` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.webhook_receivers.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ats.webhook_receivers.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `WebhookReceiver` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ats.webhook_receivers.create( + event="event", + is_active=True, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**event:** `str` + +
+
+ +
+
+ +**is_active:** `bool` + +
+
+ +
+
+ +**key:** `typing.Optional[str]` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris AccountDetails +
client.hris.account_details.retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get details for a linked account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.account_details.retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris AccountToken +
client.hris.account_token.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns the account token for the end user with the provided public token. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.account_token.retrieve( + public_token="public_token", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**public_token:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris AsyncPassthrough +
client.hris.async_passthrough.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Asynchronously pull data from an endpoint not currently supported by Merge. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.hris import DataPassthroughRequest, MethodEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.async_passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request:** `DataPassthroughRequest` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.async_passthrough.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Retrieves data from an earlier async-passthrough POST request. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.async_passthrough.retrieve( + async_passthrough_receipt_id="async_passthrough_receipt_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**async_passthrough_receipt_id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris AuditTrail +
client.hris.audit_trail.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Gets a list of audit trail events. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.audit_trail.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**end_date:** `typing.Optional[str]` — If included, will only include audit trail events that occurred before this time. + +
+
+ +
+
+ +**event_type:** `typing.Optional[str]` — If included, will only include events with the given event type. Possible values include: `CREATED_REMOTE_PRODUCTION_API_KEY`, `DELETED_REMOTE_PRODUCTION_API_KEY`, `CREATED_TEST_API_KEY`, `DELETED_TEST_API_KEY`, `REGENERATED_PRODUCTION_API_KEY`, `INVITED_USER`, `TWO_FACTOR_AUTH_ENABLED`, `TWO_FACTOR_AUTH_DISABLED`, `DELETED_LINKED_ACCOUNT`, `CREATED_DESTINATION`, `DELETED_DESTINATION`, `CHANGED_DESTINATION`, `CHANGED_SCOPES`, `CHANGED_PERSONAL_INFORMATION`, `CHANGED_ORGANIZATION_SETTINGS`, `ENABLED_INTEGRATION`, `DISABLED_INTEGRATION`, `ENABLED_CATEGORY`, `DISABLED_CATEGORY`, `CHANGED_PASSWORD`, `RESET_PASSWORD`, `ENABLED_REDACT_UNMAPPED_DATA_FOR_ORGANIZATION`, `ENABLED_REDACT_UNMAPPED_DATA_FOR_LINKED_ACCOUNT`, `DISABLED_REDACT_UNMAPPED_DATA_FOR_ORGANIZATION`, `DISABLED_REDACT_UNMAPPED_DATA_FOR_LINKED_ACCOUNT`, `CREATED_INTEGRATION_WIDE_FIELD_MAPPING`, `CREATED_LINKED_ACCOUNT_FIELD_MAPPING`, `CHANGED_INTEGRATION_WIDE_FIELD_MAPPING`, `CHANGED_LINKED_ACCOUNT_FIELD_MAPPING`, `DELETED_INTEGRATION_WIDE_FIELD_MAPPING`, `DELETED_LINKED_ACCOUNT_FIELD_MAPPING`, `FORCED_LINKED_ACCOUNT_RESYNC`, `MUTED_ISSUE`, `GENERATED_MAGIC_LINK`, `ENABLED_MERGE_WEBHOOK`, `DISABLED_MERGE_WEBHOOK`, `MERGE_WEBHOOK_TARGET_CHANGED` + +
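+For example, to pull only forced-resync events from the start of 2024 onward, a minimal sketch (`start_date` is a plain string; an ISO 8601 timestamp is assumed to be accepted):
+
+```python
+import datetime as dt
+
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+events = client.hris.audit_trail.list(
+    event_type="FORCED_LINKED_ACCOUNT_RESYNC",
+    start_date=dt.datetime(2024, 1, 1).isoformat(),  # assumed ISO 8601 string format
+)
+
+```
+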
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**start_date:** `typing.Optional[str]` — If included, will only include audit trail events that occurred after this time. + +
+
+ +
+
+ +**user_email:** `typing.Optional[str]` — If provided, this will return events associated with the specified user email. Please note that the email address reflects the user's email at the time of the event, and may not be their current email. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris AvailableActions +
client.hris.available_actions.retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of models and actions available for an account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.available_actions.retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris BankInfo +
client.hris.bank_info.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `BankInfo` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.bank_info.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**account_type:** `typing.Optional[BankInfoListRequestAccountType]` + +If provided, will only return BankInfo objects with this account type. Options: ('SAVINGS', 'CHECKING') + +- `SAVINGS` - SAVINGS +- `CHECKING` - CHECKING + +
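+For example, to list only checking accounts, a minimal sketch (assuming `BankInfoListRequestAccountType` is importable from `merge.resources.hris`):
+
+```python
+from merge.client import Merge
+from merge.resources.hris import BankInfoListRequestAccountType  # assumed import path
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+checking_accounts = client.hris.bank_info.list(
+    account_type=BankInfoListRequestAccountType.CHECKING,
+)
+
+```
+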
+
+ +
+
+ +**bank_name:** `typing.Optional[str]` — If provided, will only return BankInfo objects with this bank name. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**employee_id:** `typing.Optional[str]` — If provided, will only return bank accounts for this employee. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["employee"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**order_by:** `typing.Optional[BankInfoListRequestOrderBy]` — Overrides the default ordering for this endpoint. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["account_type"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["account_type"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.bank_info.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `BankInfo` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.bank_info.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["employee"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["account_type"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["account_type"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris Benefits +
client.hris.benefits.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Benefit` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.benefits.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**employee_id:** `typing.Optional[str]` — If provided, will return the benefits associated with the employee. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["employee"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.benefits.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Benefit` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.benefits.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["employee"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris Companies +
client.hris.companies.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Company` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.companies.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.companies.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Company` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.companies.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris Scopes +
client.hris.scopes.default_scopes_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get the default permissions for Merge Common Models and fields across all Linked Accounts of a given category. [Learn more](https://help.merge.dev/en/articles/8828211-common-model-and-field-scopes). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.scopes.default_scopes_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.scopes.linked_account_scopes_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all available permissions for Merge Common Models and fields for a single Linked Account. [Learn more](https://help.merge.dev/en/articles/8828211-common-model-and-field-scopes). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.scopes.linked_account_scopes_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.scopes.linked_account_scopes_create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update permissions for any Common Model or field for a single Linked Account. Any Scopes not set in this POST request will inherit the default Scopes. [Learn more](https://help.merge.dev/en/articles/8828211-common-model-and-field-scopes). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.hris import ( + IndividualCommonModelScopeDeserializerRequest, + ModelPermissionDeserializerRequest, +) + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.scopes.linked_account_scopes_create( + common_models=[ + IndividualCommonModelScopeDeserializerRequest( + model_name="Employee", + model_permissions={ + "READ": ModelPermissionDeserializerRequest( + is_enabled=True, + ), + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ), + }, + ), + IndividualCommonModelScopeDeserializerRequest( + model_name="Benefit", + model_permissions={ + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ) + }, + ), + ], +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**common_models:** `typing.Sequence[IndividualCommonModelScopeDeserializerRequest]` — The common models you want to update the scopes for + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris DeleteAccount +
client.hris.delete_account.delete() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a linked account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.delete_account.delete() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris Dependents +
client.hris.dependents.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Dependent` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.dependents.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_sensitive_fields:** `typing.Optional[bool]` — Whether to include sensitive fields (such as social security numbers) in the response. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.dependents.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Dependent` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.dependents.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_sensitive_fields:** `typing.Optional[bool]` — Whether to include sensitive fields (such as social security numbers) in the response. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris EmployeePayrollRuns +
client.hris.employee_payroll_runs.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `EmployeePayrollRun` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.employee_payroll_runs.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**employee_id:** `typing.Optional[str]` — If provided, will only return employee payroll runs for this employee. + +
+
+ +
+
+ +**ended_after:** `typing.Optional[dt.datetime]` — If provided, will only return employee payroll runs ended after this datetime. + +
+
+ +
+
+ +**ended_before:** `typing.Optional[dt.datetime]` — If provided, will only return employee payroll runs ended before this datetime. + +
+
+ +
+
+ +**expand:** `typing.Optional[EmployeePayrollRunsListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**payroll_run_id:** `typing.Optional[str]` — If provided, will only return employee payroll runs for this payroll run. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**started_after:** `typing.Optional[dt.datetime]` — If provided, will only return employee payroll runs started after this datetime. + +
+
+ +
+
+ +**started_before:** `typing.Optional[dt.datetime]` — If provided, will only return employee payroll runs started before this datetime. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.employee_payroll_runs.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `EmployeePayrollRun` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.employee_payroll_runs.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[EmployeePayrollRunsRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris Employees +
client.hris.employees.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Employee` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.employees.list() + +``` +
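+
+ +Filters documented below can be passed directly on the call; a sketch with an illustrative email address: +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +# Look up employees by work email, keeping sensitive fields out of the response (illustrative address). +client.hris.employees.list( + work_email="jane.doe@example.com", + include_sensitive_fields=False, + page_size=25, +) + +``` +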
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**company_id:** `typing.Optional[str]` — If provided, will only return employees for this company. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**display_full_name:** `typing.Optional[str]` — If provided, will only return employees with this display name. + +
+
+ +
+
+ +**employment_status:** `typing.Optional[EmployeesListRequestEmploymentStatus]` + +If provided, will only return employees with this employment status. + +- `ACTIVE` - ACTIVE +- `PENDING` - PENDING +- `INACTIVE` - INACTIVE + +
+
+ +
+
+ +**employment_type:** `typing.Optional[str]` — If provided, will only return employees that have an employment of the specified employment_type. + +
+
+ +
+
+ +**expand:** `typing.Optional[EmployeesListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**first_name:** `typing.Optional[str]` — If provided, will only return employees with this first name. + +
+
+ +
+
+ +**groups:** `typing.Optional[str]` — If provided, will only return employees matching the group ids; multiple groups can be separated by commas. + +
+
+ +
+
+ +**home_location_id:** `typing.Optional[str]` — If provided, will only return employees for this home location. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_sensitive_fields:** `typing.Optional[bool]` — Whether to include sensitive fields (such as social security numbers) in the response. + +
+
+ +
+
+ +**job_title:** `typing.Optional[str]` — If provided, will only return employees that have an employment of the specified job_title. + +
+
+ +
+
+ +**last_name:** `typing.Optional[str]` — If provided, will only return employees with this last name. + +
+
+ +
+
+ +**manager_id:** `typing.Optional[str]` — If provided, will only return employees for this manager. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**pay_group_id:** `typing.Optional[str]` — If provided, will only return employees for this pay group. + +
+
+ +
+
+ +**personal_email:** `typing.Optional[str]` — If provided, will only return employees with this personal email. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[EmployeesListRequestRemoteFields]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[EmployeesListRequestShowEnumOrigins]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**started_after:** `typing.Optional[dt.datetime]` — If provided, will only return employees that started after this datetime. + +
+
+ +
+
+ +**started_before:** `typing.Optional[dt.datetime]` — If provided, will only return employees that started before this datetime. + +
+
+ +
+
+ +**team_id:** `typing.Optional[str]` — If provided, will only return employees for this team. + +
+
+ +
+
+ +**terminated_after:** `typing.Optional[dt.datetime]` — If provided, will only return employees that were terminated after this datetime. + +
+
+ +
+
+ +**terminated_before:** `typing.Optional[dt.datetime]` — If provided, will only return employees that were terminated before this datetime. + +
+
+ +
+
+ +**work_email:** `typing.Optional[str]` — If provided, will only return employees with this work email. + +
+
+ +
+
+ +**work_location_id:** `typing.Optional[str]` — If provided, will only return employees for this location. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.employees.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates an `Employee` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.hris import EmployeeRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.employees.create( + model=EmployeeRequest(), +) + +``` +
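+
+ +In practice you populate the model rather than passing an empty `EmployeeRequest`. A sketch, assuming `EmployeeRequest` exposes `first_name`, `last_name`, and `work_email` fields (these names are not shown in this reference, so verify them against the model definition): +
+
+ +```python +from merge.client import Merge +from merge.resources.hris import EmployeeRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +# The field names on EmployeeRequest are assumed here, not taken from this reference. +client.hris.employees.create( + model=EmployeeRequest( + first_name="Jane", + last_name="Doe", + work_email="jane.doe@example.com", + ), + run_async=True, +) + +``` +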
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `EmployeeRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.employees.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `Employee` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.employees.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[EmployeesRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_sensitive_fields:** `typing.Optional[bool]` — Whether to include sensitive fields (such as social security numbers) in the response. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[EmployeesRetrieveRequestRemoteFields]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[EmployeesRetrieveRequestShowEnumOrigins]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.employees.ignore_create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Ignores a specific row based on the `model_id` in the url. These records will have their properties set to null, and will not be updated in future syncs. The "reason" and "message" fields in the request body will be stored for audit purposes. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.hris import ReasonEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.employees.ignore_create( + model_id="model_id", + reason=ReasonEnum.GENERAL_CUSTOMER_REQUEST, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model_id:** `str` + +
+
+ +
+
+ +**reason:** `IgnoreCommonModelRequestReason` + +
+
+ +
+
+ +**message:** `typing.Optional[str]` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.employees.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Employee` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.employees.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris EmployerBenefits +
client.hris.employer_benefits.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `EmployerBenefit` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.employer_benefits.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.employer_benefits.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `EmployerBenefit` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.employer_benefits.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris Employments +
client.hris.employments.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Employment` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.employments.list() + +``` +
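+
+ +To scope the list to a single employee, pass the documented `employee_id` filter; a sketch with a placeholder ID: +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +# Employments for a single employee, with raw remote data included (placeholder ID). +client.hris.employments.list( + employee_id="employee_id", + include_remote_data=True, + page_size=50, +) + +``` +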
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**employee_id:** `typing.Optional[str]` — If provided, will only return employments for this employee. + +
+
+ +
+
+ +**expand:** `typing.Optional[EmploymentsListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**order_by:** `typing.Optional[EmploymentsListRequestOrderBy]` — Overrides the default ordering for this endpoint. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[EmploymentsListRequestRemoteFields]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[EmploymentsListRequestShowEnumOrigins]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.employments.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `Employment` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.employments.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[EmploymentsRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[EmploymentsRetrieveRequestRemoteFields]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[EmploymentsRetrieveRequestShowEnumOrigins]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris FieldMapping +
client.hris.field_mapping.field_mappings_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all Field Mappings for this Linked Account. Field Mappings are mappings between third-party Remote Fields and user defined Merge fields. [Learn more](https://docs.merge.dev/supplemental-data/field-mappings/overview/). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.field_mapping.field_mappings_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.field_mapping.field_mappings_create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create new Field Mappings that will be available after the next scheduled sync. This will cause the next sync for this Linked Account to sync **ALL** data from start. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.field_mapping.field_mappings_create( + target_field_name="example_target_field_name", + target_field_description="this is a example description of the target field", + remote_field_traversal_path=["example_remote_field"], + remote_method="GET", + remote_url_path="/example-url-path", + common_model_name="ExampleCommonModel", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**target_field_name:** `str` — The name of the target field you want this remote field to map to. + +
+
+ +
+
+ +**target_field_description:** `str` — The description of the target field you want this remote field to map to. + +
+
+ +
+
+ +**remote_field_traversal_path:** `typing.Sequence[typing.Any]` — The field traversal path of the remote field listed when you hit the GET /remote-fields endpoint. + +
+
+ +
+
+ +**remote_method:** `str` — The method of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**remote_url_path:** `str` — The path of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**common_model_name:** `str` — The name of the Common Model that the remote field corresponds to in a given category. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.field_mapping.field_mappings_destroy(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Deletes Field Mappings for a Linked Account. All data related to this Field Mapping will be deleted and these changes will be reflected after the next scheduled sync. This will cause the next sync for this Linked Account to sync **ALL** data from start. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.field_mapping.field_mappings_destroy( + field_mapping_id="field_mapping_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**field_mapping_id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.field_mapping.field_mappings_partial_update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create or update existing Field Mappings for a Linked Account. Changes will be reflected after the next scheduled sync. This will cause the next sync for this Linked Account to sync **ALL** data from start. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.field_mapping.field_mappings_partial_update( + field_mapping_id="field_mapping_id", +) + +``` +
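+
+ +The optional body fields documented below can be included to repoint the mapping; a sketch reusing the illustrative values from the create example above: +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.field_mapping.field_mappings_partial_update( + field_mapping_id="field_mapping_id", + remote_field_traversal_path=["example_remote_field"], + remote_method="GET", + remote_url_path="/example-url-path", +) + +``` +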
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**field_mapping_id:** `str` + +
+
+ +
+
+ +**remote_field_traversal_path:** `typing.Optional[typing.Sequence[typing.Any]]` — The field traversal path of the remote field listed when you hit the GET /remote-fields endpoint. + +
+
+ +
+
+ +**remote_method:** `typing.Optional[str]` — The method of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**remote_url_path:** `typing.Optional[str]` — The path of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.field_mapping.remote_fields_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all remote fields for a Linked Account. Remote fields are third-party fields that are accessible after initial sync if remote_data is enabled. You can use remote fields to override existing Merge fields or map a new Merge field. [Learn more](https://docs.merge.dev/supplemental-data/field-mappings/overview/). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.field_mapping.remote_fields_retrieve() + +``` +
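+
+ +The two documented query parameters can narrow the response; a sketch with illustrative Common Model names: +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +# Restrict the response to two Common Models and request example values (illustrative names). +client.hris.field_mapping.remote_fields_retrieve( + common_models="Employee,Employment", + include_example_values="true", +) + +``` +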
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**common_models:** `typing.Optional[str]` — A comma-separated list of Common Model names. If included, will only return Remote Fields for those Common Models. + +
+
+ +
+
+ +**include_example_values:** `typing.Optional[str]` — If true, will include example values, where available, for remote fields in the 3rd party platform. These examples come from active data from your customers. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.field_mapping.target_fields_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all organization-wide Target Fields; this will not include any Linked Account-specific Target Fields. Organization-wide Target Fields are additional fields appended to the Merge Common Model for all Linked Accounts in a category. [Learn more](https://docs.merge.dev/supplemental-data/field-mappings/target-fields/). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.field_mapping.target_fields_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris GenerateKey +
client.hris.generate_key.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a remote key. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.generate_key.create( + name="Remote Deployment Key 1", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**name:** `str` — The name of the remote key + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris Groups +
client.hris.groups.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Group` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.groups.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**is_commonly_used_as_team:** `typing.Optional[str]` — If provided, specifies whether to return only Group objects which refer to a team in the third party platform. Note that this is an opinionated view based on how a team may be represented in the third party platform. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**names:** `typing.Optional[str]` — If provided, will only return groups with these names. Multiple values can be separated by commas. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["type"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["type"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**types:** `typing.Optional[str]` — If provided, will only return groups of these types. Multiple values can be separated by commas. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.groups.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Group` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.groups.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["type"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["type"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris Issues +
client.hris.issues.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Gets issues. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.issues.list() + +``` +
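+
+ +The documented filters can be combined; a sketch that pulls recent issues, muted ones included (illustrative window): +
+
+ +```python +import datetime as dt +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +# Issues whose first incident occurred in the last 30 days, muted issues included. +client.hris.issues.list( + first_incident_time_after=dt.datetime.now() - dt.timedelta(days=30), + include_muted="true", + page_size=25, +) + +``` +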
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**account_token:** `typing.Optional[str]` + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**end_date:** `typing.Optional[str]` — If included, will only include issues whose most recent action occurred before this time. + +
+
+ +
+
+ +**end_user_organization_name:** `typing.Optional[str]` + +
+
+ +
+
+ +**first_incident_time_after:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose first incident time was after this datetime. + +
+
+ +
+
+ +**first_incident_time_before:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose first incident time was before this datetime. + +
+
+ +
+
+ +**include_muted:** `typing.Optional[str]` — If true, will include muted issues. + +
+
+ +
+
+ +**integration_name:** `typing.Optional[str]` + +
+
+ +
+
+ +**last_incident_time_after:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose last incident time was after this datetime. + +
+
+ +
+
+ +**last_incident_time_before:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose last incident time was before this datetime. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**start_date:** `typing.Optional[str]` — If included, will only include issues whose most recent action occurred after this time. + +
+
+ +
+
+ +**status:** `typing.Optional[IssuesListRequestStatus]` + +Status of the issue. Options: ('ONGOING', 'RESOLVED') + +- `ONGOING` - ONGOING +- `RESOLVED` - RESOLVED + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.issues.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific issue. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.issues.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris LinkToken +
client.hris.link_token.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a link token to be used when linking a new end user. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.hris import CategoriesEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.link_token.create( + end_user_email_address="example@gmail.com", + end_user_organization_name="Test Organization", + end_user_origin_id="12345", + categories=[CategoriesEnum.HRIS, CategoriesEnum.ATS], +) + +``` +
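+
+ +Optional settings documented below, such as `link_expiry_mins` and `should_create_magic_link_url`, can be added to the same call; a sketch: +
+
+ +```python +from merge.client import Merge +from merge.resources.hris import CategoriesEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.link_token.create( + end_user_email_address="example@gmail.com", + end_user_organization_name="Test Organization", + end_user_origin_id="12345", + categories=[CategoriesEnum.HRIS], + link_expiry_mins=60, + should_create_magic_link_url=False, + language="en", +) + +``` +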
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**end_user_email_address:** `str` — Your end user's email address. This is purely for identification purposes - setting this value will not cause any emails to be sent. + +
+
+ +
+
+ +**end_user_organization_name:** `str` — Your end user's organization. + +
+
+ +
+
+ +**end_user_origin_id:** `str` — This unique identifier typically represents the ID for your end user in your product's database. This value must be distinct from other Linked Accounts' unique identifiers. + +
+
+ +
+
+ +**categories:** `typing.Sequence[CategoriesEnum]` — The integration categories to show in Merge Link. + +
+
+ +
+
+ +**integration:** `typing.Optional[str]` — The slug of a specific pre-selected integration for this linking flow token. For examples of slugs, see https://docs.merge.dev/guides/merge-link/single-integration/. + +
+
+ +
+
+ +**link_expiry_mins:** `typing.Optional[int]` — How long this token is valid, in minutes: between 30 and 720, or up to 10080 when generating a Magic Link URL. Defaults to 30. + +
+
+ +
+
+ +**should_create_magic_link_url:** `typing.Optional[bool]` — Whether to generate a Magic Link URL. Defaults to false. For more information on Magic Link, see https://merge.dev/blog/integrations-fast-say-hello-to-magic-link. + +
+
+ +
+
+ +**common_models:** `typing.Optional[typing.Sequence[CommonModelScopesBodyRequest]]` — An array of objects to specify the models and fields that will be disabled for a given Linked Account. Each object uses model_id, enabled_actions, and disabled_fields to specify the model, method, and fields that are scoped for a given Linked Account. + +
+
+ +
+
+ +**category_common_model_scopes:** `typing.Optional[ + typing.Dict[ + str, + typing.Optional[ + typing.Sequence[IndividualCommonModelScopeDeserializerRequest] + ], + ] +]` — When creating a Link Token, you can set permissions for Common Models that will apply to the account that is going to be linked. Any model or field not specified in link token payload will default to existing settings. + +
+
+ +
+
+ +**language:** `typing.Optional[str]` — The language code for the language to localize Merge Link to. + +
+
+ +
+
+ +**integration_specific_config:** `typing.Optional[typing.Dict[str, typing.Any]]` — A JSON object containing integration-specific configuration options. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris LinkedAccounts +
client.hris.linked_accounts.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +List linked accounts for your organization. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.linked_accounts.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**category:** `typing.Optional[LinkedAccountsListRequestCategory]` + +Options: ('hris', 'ats', 'accounting', 'ticketing', 'crm', 'mktg', 'filestorage') + +- `hris` - hris +- `ats` - ats +- `accounting` - accounting +- `ticketing` - ticketing +- `crm` - crm +- `mktg` - mktg +- `filestorage` - filestorage + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**end_user_email_address:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given email address. + +
+
+ +
+
+ +**end_user_organization_name:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given organization name. + +
+
+ +
+
+ +**end_user_origin_id:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given origin ID. + +
+
+ +
+
+ +**end_user_origin_ids:** `typing.Optional[str]` — Comma-separated list of EndUser origin IDs, making it possible to specify multiple EndUsers at once. + +
+
+ +
+
+ +**id:** `typing.Optional[str]` + +
+
+ +
+
+ +**ids:** `typing.Optional[str]` — Comma-separated list of LinkedAccount IDs, making it possible to specify multiple LinkedAccounts at once. + +
+
+ +
+
+ +**include_duplicates:** `typing.Optional[bool]` — If `true`, will include complete production duplicates of the account specified by the `id` query parameter in the response. `id` must be for a complete production linked account. + +
+
+ +
+
+ +**integration_name:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given integration name. + +
+
+ +
+
+ +**is_test_account:** `typing.Optional[str]` — If included, will only include test linked accounts. If not included, will only include non-test linked accounts. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**status:** `typing.Optional[str]` — Filter by status. Options: `COMPLETE`, `INCOMPLETE`, `RELINK_NEEDED` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris Locations +
client.hris.locations.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Location` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.locations.list() + +``` +
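+
+ +To filter by the documented `location_type`, the request enum can be used; a sketch that assumes `LocationsListRequestLocationType` is importable from `merge.resources.hris` like the other types in this reference: +
+
+ +```python +from merge.client import Merge +from merge.resources.hris import LocationsListRequestLocationType + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +# WORK locations only; the enum import path is an assumption, not taken from this reference. +client.hris.locations.list( + location_type=LocationsListRequestLocationType.WORK, + page_size=25, +) + +``` +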
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**location_type:** `typing.Optional[LocationsListRequestLocationType]` + +If provided, will only return locations with this location_type + +- `HOME` - HOME +- `WORK` - WORK + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["location_type"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["location_type"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.locations.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Location` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.locations.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["location_type"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["location_type"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris Passthrough +
client.hris.passthrough.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Pull data from an endpoint not currently supported by Merge. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.hris import DataPassthroughRequest, MethodEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request:** `DataPassthroughRequest` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris PayGroups +
client.hris.pay_groups.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `PayGroup` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.pay_groups.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.pay_groups.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `PayGroup` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.pay_groups.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris PayrollRuns +
client.hris.payroll_runs.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `PayrollRun` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.payroll_runs.list() + +``` +
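+
+ +The date-range filters documented below can bound the results; a sketch with illustrative dates: +
+
+ +```python +import datetime as dt +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +# Payroll runs that started after Jan 1, 2024 and ended before Apr 1, 2024 (illustrative dates). +client.hris.payroll_runs.list( + started_after=dt.datetime(2024, 1, 1), + ended_before=dt.datetime(2024, 4, 1), + page_size=50, +) + +``` +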
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**ended_after:** `typing.Optional[dt.datetime]` — If provided, will only return payroll runs ended after this datetime. + +
+
+ +
+
+ +**ended_before:** `typing.Optional[dt.datetime]` — If provided, will only return payroll runs ended before this datetime. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[PayrollRunsListRequestRemoteFields]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**run_type:** `typing.Optional[PayrollRunsListRequestRunType]` + +If provided, will only return payroll runs of this type. Options: ('REGULAR', 'OFF_CYCLE', 'CORRECTION', 'TERMINATION', 'SIGN_ON_BONUS') + +- `REGULAR` - REGULAR +- `OFF_CYCLE` - OFF_CYCLE +- `CORRECTION` - CORRECTION +- `TERMINATION` - TERMINATION +- `SIGN_ON_BONUS` - SIGN_ON_BONUS + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[PayrollRunsListRequestShowEnumOrigins]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**started_after:** `typing.Optional[dt.datetime]` — If provided, will only return payroll runs started after this datetime. + +
+
+ +
+
+ +**started_before:** `typing.Optional[dt.datetime]` — If provided, will only return payroll runs started before this datetime. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.payroll_runs.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `PayrollRun` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.payroll_runs.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[PayrollRunsRetrieveRequestRemoteFields]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[PayrollRunsRetrieveRequestShowEnumOrigins]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris RegenerateKey +
client.hris.regenerate_key.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Exchange remote keys. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.regenerate_key.create( + name="Remote Deployment Key 1", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**name:** `str` — The name of the remote key + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris SyncStatus +
client.hris.sync_status.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get syncing status. Possible values: `DISABLED`, `DONE`, `FAILED`, `PARTIALLY_SYNCED`, `PAUSED`, `SYNCING`. Learn more about sync status in our [Help Center](https://help.merge.dev/en/articles/8184193-merge-sync-statuses). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.sync_status.list() + +``` +
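+
+ +Because results are paginated via `cursor` and `page_size`, pages can be walked in a loop; a sketch that assumes the paginated response exposes `next` and `results` attributes (not shown in this reference): +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +# Walk every page by feeding the returned `next` value back in as `cursor`. +# The `next` and `results` attribute names are assumptions about the paginated response. +cursor = None +has_more = True +while has_more: + page = client.hris.sync_status.list(cursor=cursor, page_size=100) + print(page.results) + cursor = page.next + has_more = cursor is not None + +``` +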
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris ForceResync +
client.hris.force_resync.sync_status_resync_create() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Force re-sync of all models. This is available for all organizations via the dashboard. Force re-sync is also available programmatically via API for monthly, quarterly, and highest sync frequency customers on the Launch, Professional, or Enterprise plans. Doing so will consume a sync credit for the relevant linked account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.force_resync.sync_status_resync_create() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris Teams +
client.hris.teams.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Team` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.teams.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["parent_team"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**parent_team_id:** `typing.Optional[str]` — If provided, will only return teams with this parent team. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.teams.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Team` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.teams.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["parent_team"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris TimeOff +
client.hris.time_off.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `TimeOff` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.time_off.list() + +``` +
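+
+ +The documented filters can scope time off to one employee and a start window; a sketch with a placeholder ID and illustrative date: +
+
+ +```python +import datetime as dt +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +# Time off for one employee that started after Jan 1, 2024 (placeholder ID, illustrative date). +client.hris.time_off.list( + employee_id="employee_id", + started_after=dt.datetime(2024, 1, 1), + page_size=50, +) + +``` +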
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**approver_id:** `typing.Optional[str]` — If provided, will only return time off for this approver. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**employee_id:** `typing.Optional[str]` — If provided, will only return time off for this employee. + +
+
+ +
+
+ +**ended_after:** `typing.Optional[dt.datetime]` — If provided, will only return time-offs that ended after this datetime. + +
+
+ +
+
+ +**ended_before:** `typing.Optional[dt.datetime]` — If provided, will only return time-offs that ended before this datetime. + +
+
+ +
+
+ +**expand:** `typing.Optional[TimeOffListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[TimeOffListRequestRemoteFields]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_type:** `typing.Optional[TimeOffListRequestRequestType]` + +If provided, will only return TimeOff with this request type. Options: ('VACATION', 'SICK', 'PERSONAL', 'JURY_DUTY', 'VOLUNTEER', 'BEREAVEMENT') + +- `VACATION` - VACATION +- `SICK` - SICK +- `PERSONAL` - PERSONAL +- `JURY_DUTY` - JURY_DUTY +- `VOLUNTEER` - VOLUNTEER +- `BEREAVEMENT` - BEREAVEMENT + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[TimeOffListRequestShowEnumOrigins]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**started_after:** `typing.Optional[dt.datetime]` — If provided, will only return time-offs that started after this datetime. + +
+
+ +
+
+ +**started_before:** `typing.Optional[dt.datetime]` — If provided, will only return time-offs that started before this datetime. + +
+
+ +
+
+ +**status:** `typing.Optional[TimeOffListRequestStatus]` + +If provided, will only return TimeOff with this status. Options: ('REQUESTED', 'APPROVED', 'DECLINED', 'CANCELLED', 'DELETED') + +- `REQUESTED` - REQUESTED +- `APPROVED` - APPROVED +- `DECLINED` - DECLINED +- `CANCELLED` - CANCELLED +- `DELETED` - DELETED + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
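+The filters above can be combined with cursor pagination. The following is a minimal illustrative sketch, not generated output: it assumes the paginated response exposes `results` and a `next` attribute holding the cursor for the following page, matching how the other paginated list types in this SDK are modeled.
+
+```python
+import datetime as dt
+
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+# Page through this year's time off for a single employee, 50 records at a time.
+cursor = None
+while True:
+    page = client.hris.time_off.list(
+        employee_id="EMPLOYEE_ID",
+        started_after=dt.datetime(2024, 1, 1),
+        page_size=50,
+        cursor=cursor,
+    )
+    for time_off in page.results or []:
+        print(time_off.id)
+    cursor = page.next  # assumed attribute name for the next-page cursor
+    if not cursor:
+        break
+```
+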
client.hris.time_off.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `TimeOff` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.hris import TimeOffRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.time_off.create( + model=TimeOffRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `TimeOffRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
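+Both write flags documented above can be passed alongside the model. A minimal sketch; the `TimeOffRequest` fields are left empty here because they are not documented in this section.
+
+```python
+from merge.client import Merge
+from merge.resources.hris import TimeOffRequest
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+# Run the third-party write asynchronously and ask for debug fields in the response.
+response = client.hris.time_off.create(
+    model=TimeOffRequest(),
+    is_debug_mode=True,
+    run_async=True,
+)
+print(response)
+```
+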
client.hris.time_off.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `TimeOff` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.time_off.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[TimeOffRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[TimeOffRetrieveRequestRemoteFields]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[TimeOffRetrieveRequestShowEnumOrigins]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.time_off.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `TimeOff` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.time_off.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris TimeOffBalances +
client.hris.time_off_balances.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `TimeOffBalance` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.time_off_balances.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**employee_id:** `typing.Optional[str]` — If provided, will only return time off balances for this employee. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["employee"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**policy_type:** `typing.Optional[TimeOffBalancesListRequestPolicyType]` + +If provided, will only return TimeOffBalance with this policy type. Options: ('VACATION', 'SICK', 'PERSONAL', 'JURY_DUTY', 'VOLUNTEER', 'BEREAVEMENT') + +- `VACATION` - VACATION +- `SICK` - SICK +- `PERSONAL` - PERSONAL +- `JURY_DUTY` - JURY_DUTY +- `VOLUNTEER` - VOLUNTEER +- `BEREAVEMENT` - BEREAVEMENT + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["policy_type"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["policy_type"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
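+Because `expand` and `show_enum_origins` accept a single literal value for this endpoint, they are passed as plain strings. A minimal sketch using only the parameters documented above.
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+# Balances for one employee, expanding the employee relation and
+# returning the provider's original policy_type values.
+balances = client.hris.time_off_balances.list(
+    employee_id="EMPLOYEE_ID",
+    expand="employee",
+    show_enum_origins="policy_type",
+)
+```
+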
client.hris.time_off_balances.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `TimeOffBalance` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.time_off_balances.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["employee"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["policy_type"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["policy_type"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris TimesheetEntries +
client.hris.timesheet_entries.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `TimesheetEntry` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.timesheet_entries.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**employee_id:** `typing.Optional[str]` — If provided, will only return timesheet entries for this employee. + +
+
+ +
+
+ +**ended_after:** `typing.Optional[str]` — If provided, will only return timesheet entries that ended after this datetime. + +
+
+ +
+
+ +**ended_before:** `typing.Optional[str]` — If provided, will only return timesheet entries that ended before this datetime. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**order_by:** `typing.Optional[TimesheetEntriesListRequestOrderBy]` — Overrides the default ordering for this endpoint. Possible values include: start_time, -start_time. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**started_after:** `typing.Optional[str]` — If provided, will only return timesheet entries that started after this datetime. + +
+
+ +
+
+ +**started_before:** `typing.Optional[str]` — If provided, will only return timesheet entries that started before this datetime. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
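+Note that `started_after`, `started_before`, `ended_after`, and `ended_before` are typed as plain strings for this endpoint, so ISO 8601 timestamps are passed directly. A minimal sketch:
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+# Timesheet entries for one employee within a one-week window.
+entries = client.hris.timesheet_entries.list(
+    employee_id="EMPLOYEE_ID",
+    started_after="2024-07-01T00:00:00Z",
+    started_before="2024-07-08T00:00:00Z",
+    page_size=100,
+)
+```
+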
client.hris.timesheet_entries.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `TimesheetEntry` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.hris import TimesheetEntryRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.timesheet_entries.create( + model=TimesheetEntryRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `TimesheetEntryRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.timesheet_entries.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `TimesheetEntry` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.timesheet_entries.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.timesheet_entries.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `TimesheetEntry` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.timesheet_entries.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Hris WebhookReceivers +
client.hris.webhook_receivers.list() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `WebhookReceiver` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.webhook_receivers.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.hris.webhook_receivers.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `WebhookReceiver` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.hris.webhook_receivers.create( + event="event", + is_active=True, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**event:** `str` + +
+
+ +
+
+ +**is_active:** `bool` + +
+
+ +
+
+ +**key:** `typing.Optional[str]` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm AccountDetails +
client.crm.account_details.retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get details for a linked account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.account_details.retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm AccountToken +
client.crm.account_token.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns the account token for the end user with the provided public token. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.account_token.retrieve( + public_token="public_token", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**public_token:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm Accounts +
client.crm.accounts.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Account` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.accounts.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["owner"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**name:** `typing.Optional[str]` — If provided, will only return accounts with this name. + +
+
+ +
+
+ +**owner_id:** `typing.Optional[str]` — If provided, will only return accounts with this owner. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
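+A minimal sketch combining the documented filters; `include_remote_fields` adds unmapped provider fields to each returned `Account`, and `expand` accepts the single literal `"owner"` for this endpoint. The name and owner values are placeholders.
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+# Accounts with this exact name for a given owner, including unmapped remote fields.
+accounts = client.crm.accounts.list(
+    name="Globex Corporation",
+    owner_id="OWNER_ID",
+    include_remote_fields=True,
+    expand="owner",
+)
+```
+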
client.crm.accounts.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates an `Account` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import AccountRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.accounts.create( + model=AccountRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `AccountRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.accounts.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `Account` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.accounts.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["owner"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.accounts.partial_update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Updates an `Account` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import PatchedAccountRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.accounts.partial_update( + id="id", + model=PatchedAccountRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**model:** `PatchedAccountRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.accounts.meta_patch_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `CRMAccount` PATCHs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.accounts.meta_patch_retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.accounts.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `CRMAccount` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.accounts.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.accounts.remote_field_classes_list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `RemoteFieldClass` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.accounts.remote_field_classes_list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm AsyncPassthrough +
client.crm.async_passthrough.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Asynchronously pull data from an endpoint not currently supported by Merge. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import DataPassthroughRequest, MethodEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.async_passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request:** `DataPassthroughRequest` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.async_passthrough.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Retrieves data from an earlier async-passthrough POST request. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.async_passthrough.retrieve( + async_passthrough_receipt_id="async_passthrough_receipt_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**async_passthrough_receipt_id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
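+The two async-passthrough endpoints are typically used together: `create` returns a receipt whose ID is later passed to `retrieve`. The sketch below is illustrative only; the `async_passthrough_receipt_id` attribute name on the receipt object is an assumption based on the parameter name documented above.
+
+```python
+from merge.client import Merge
+from merge.resources.crm import DataPassthroughRequest, MethodEnum
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+# Kick off the passthrough request and keep the receipt.
+receipt = client.crm.async_passthrough.create(
+    request=DataPassthroughRequest(
+        method=MethodEnum.GET,
+        path="/scooters",
+    ),
+)
+
+# Later, fetch the result using the receipt ID (attribute name assumed).
+result = client.crm.async_passthrough.retrieve(
+    async_passthrough_receipt_id=receipt.async_passthrough_receipt_id,
+)
+print(result)
+```
+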
+ +## Crm AuditTrail +
client.crm.audit_trail.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Gets a list of audit trail events. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.audit_trail.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**end_date:** `typing.Optional[str]` — If included, will only include audit trail events that occurred before this time. + +
+
+ +
+
+ +**event_type:** `typing.Optional[str]` — If included, will only include events with the given event type. Possible values include: `CREATED_REMOTE_PRODUCTION_API_KEY`, `DELETED_REMOTE_PRODUCTION_API_KEY`, `CREATED_TEST_API_KEY`, `DELETED_TEST_API_KEY`, `REGENERATED_PRODUCTION_API_KEY`, `INVITED_USER`, `TWO_FACTOR_AUTH_ENABLED`, `TWO_FACTOR_AUTH_DISABLED`, `DELETED_LINKED_ACCOUNT`, `CREATED_DESTINATION`, `DELETED_DESTINATION`, `CHANGED_DESTINATION`, `CHANGED_SCOPES`, `CHANGED_PERSONAL_INFORMATION`, `CHANGED_ORGANIZATION_SETTINGS`, `ENABLED_INTEGRATION`, `DISABLED_INTEGRATION`, `ENABLED_CATEGORY`, `DISABLED_CATEGORY`, `CHANGED_PASSWORD`, `RESET_PASSWORD`, `ENABLED_REDACT_UNMAPPED_DATA_FOR_ORGANIZATION`, `ENABLED_REDACT_UNMAPPED_DATA_FOR_LINKED_ACCOUNT`, `DISABLED_REDACT_UNMAPPED_DATA_FOR_ORGANIZATION`, `DISABLED_REDACT_UNMAPPED_DATA_FOR_LINKED_ACCOUNT`, `CREATED_INTEGRATION_WIDE_FIELD_MAPPING`, `CREATED_LINKED_ACCOUNT_FIELD_MAPPING`, `CHANGED_INTEGRATION_WIDE_FIELD_MAPPING`, `CHANGED_LINKED_ACCOUNT_FIELD_MAPPING`, `DELETED_INTEGRATION_WIDE_FIELD_MAPPING`, `DELETED_LINKED_ACCOUNT_FIELD_MAPPING`, `FORCED_LINKED_ACCOUNT_RESYNC`, `MUTED_ISSUE`, `GENERATED_MAGIC_LINK`, `ENABLED_MERGE_WEBHOOK`, `DISABLED_MERGE_WEBHOOK`, `MERGE_WEBHOOK_TARGET_CHANGED` + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**start_date:** `typing.Optional[str]` — If included, will only include audit trail events that occurred after this time. + +
+
+ +
+
+ +**user_email:** `typing.Optional[str]` — If provided, this will return events associated with the specified user email. Please note that the email address reflects the user's email at the time of the event, and may not be their current email. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
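+A minimal sketch filtering the audit trail to a single event type within a date range; `start_date` and `end_date` are plain strings, and the event type value is taken from the list documented above.
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+# Linked-account deletions during the first half of 2024.
+events = client.crm.audit_trail.list(
+    event_type="DELETED_LINKED_ACCOUNT",
+    start_date="2024-01-01T00:00:00Z",
+    end_date="2024-07-01T00:00:00Z",
+    page_size=100,
+)
+```
+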
+ +## Crm AvailableActions +
client.crm.available_actions.retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of models and actions available for an account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.available_actions.retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm Contacts +
client.crm.contacts.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Contact` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.contacts.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**account_id:** `typing.Optional[str]` — If provided, will only return contacts with this account. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**email_addresses:** `typing.Optional[str]` — If provided, will only return contacts matching the email addresses; multiple email_addresses can be separated by commas. + +
+
+ +
+
+ +**expand:** `typing.Optional[ContactsListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**phone_numbers:** `typing.Optional[str]` — If provided, will only return contacts matching the phone numbers; multiple phone numbers can be separated by commas. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
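+Both `email_addresses` and `phone_numbers` accept multiple comma-separated values in a single string, as documented above. A minimal sketch with placeholder values:
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+# Contacts matching either email address, restricted to one CRM account.
+contacts = client.crm.contacts.list(
+    account_id="ACCOUNT_ID",
+    email_addresses="jane@example.com,j.doe@example.com",
+)
+```
+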
client.crm.contacts.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `Contact` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import ContactRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.contacts.create( + model=ContactRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `ContactRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.contacts.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Contact` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.contacts.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[ContactsRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.contacts.partial_update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Updates a `Contact` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import PatchedContactRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.contacts.partial_update( + id="id", + model=PatchedContactRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**model:** `PatchedContactRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.contacts.ignore_create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Ignores a specific row based on the `model_id` in the URL. These records will have their properties set to null, and will not be updated in future syncs. The "reason" and "message" fields in the request body will be stored for audit purposes. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import IgnoreCommonModelRequest, ReasonEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.contacts.ignore_create( + model_id="model_id", + request=IgnoreCommonModelRequest( + reason=ReasonEnum.GENERAL_CUSTOMER_REQUEST, + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model_id:** `str` + +
+
+ +
+
+ +**request:** `IgnoreCommonModelRequest` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.contacts.meta_patch_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `CRMContact` PATCHs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.contacts.meta_patch_retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.contacts.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `CRMContact` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.contacts.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.contacts.remote_field_classes_list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `RemoteFieldClass` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.contacts.remote_field_classes_list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm CustomObjectClasses +
client.crm.custom_object_classes.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `CustomObjectClass` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.custom_object_classes.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["fields"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.custom_object_classes.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `CustomObjectClass` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.custom_object_classes.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["fields"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm AssociationTypes +
client.crm.association_types.custom_object_classes_association_types_list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `AssociationType` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.association_types.custom_object_classes_association_types_list( + custom_object_class_id="custom_object_class_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**custom_object_class_id:** `str` + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["target_object_classes"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.association_types.custom_object_classes_association_types_create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates an `AssociationType` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import ( + AssociationTypeRequestRequest, + ObjectClassDescriptionRequest, + OriginTypeEnum, +) + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.association_types.custom_object_classes_association_types_create( + custom_object_class_id="custom_object_class_id", + model=AssociationTypeRequestRequest( + source_object_class=ObjectClassDescriptionRequest( + id="id", + origin_type=OriginTypeEnum.CUSTOM_OBJECT, + ), + target_object_classes=[ + ObjectClassDescriptionRequest( + id="id", + origin_type=OriginTypeEnum.CUSTOM_OBJECT, + ) + ], + remote_key_name="remote_key_name", + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**custom_object_class_id:** `str` + +
+
+ +
+
+ +**model:** `AssociationTypeRequestRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.association_types.custom_object_classes_association_types_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `AssociationType` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.association_types.custom_object_classes_association_types_retrieve( + custom_object_class_id="custom_object_class_id", + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**custom_object_class_id:** `str` + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["target_object_classes"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.association_types.custom_object_classes_association_types_meta_post_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `CRMAssociationType` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.association_types.custom_object_classes_association_types_meta_post_retrieve( + custom_object_class_id="custom_object_class_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**custom_object_class_id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm CustomObjects +
client.crm.custom_objects.custom_object_classes_custom_objects_list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `CustomObject` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.custom_objects.custom_object_classes_custom_objects_list( + custom_object_class_id="custom_object_class_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**custom_object_class_id:** `str` + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.custom_objects.custom_object_classes_custom_objects_create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `CustomObject` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import CustomObjectRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.custom_objects.custom_object_classes_custom_objects_create( + custom_object_class_id="custom_object_class_id", + model=CustomObjectRequest( + fields={"test_field": "hello"}, + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**custom_object_class_id:** `str` + +
+
+ +
+
+ +**model:** `CustomObjectRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.custom_objects.custom_object_classes_custom_objects_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `CustomObject` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.custom_objects.custom_object_classes_custom_objects_retrieve( + custom_object_class_id="custom_object_class_id", + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**custom_object_class_id:** `str` + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.custom_objects.custom_object_classes_custom_objects_meta_post_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `CRMCustomObject` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.custom_objects.custom_object_classes_custom_objects_meta_post_retrieve( + custom_object_class_id="custom_object_class_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**custom_object_class_id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm Associations +
client.crm.associations.custom_object_classes_custom_objects_associations_list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Association` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.associations.custom_object_classes_custom_objects_associations_list( + custom_object_class_id="custom_object_class_id", + object_id="object_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**custom_object_class_id:** `str` + +
+
+ +
+
+ +**object_id:** `str` + +
+
+ +
+
+ +**association_type_id:** `typing.Optional[str]` — If provided, will only return associations with this association_type. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["association_type"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.associations.custom_object_classes_custom_objects_associations_update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates an Association between `source_object_id` and `target_object_id` of type `association_type_id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.associations.custom_object_classes_custom_objects_associations_update( + association_type_id="association_type_id", + source_class_id="source_class_id", + source_object_id="source_object_id", + target_class_id="target_class_id", + target_object_id="target_object_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**association_type_id:** `str` + +
+
+ +
+
+ +**source_class_id:** `str` + +
+
+ +
+
+ +**source_object_id:** `str` + +
+
+ +
+
+ +**target_class_id:** `str` + +
+
+ +
+
+ +**target_object_id:** `str` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm Scopes +
client.crm.scopes.default_scopes_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get the default permissions for Merge Common Models and fields across all Linked Accounts of a given category. [Learn more](https://help.merge.dev/en/articles/8828211-common-model-and-field-scopes). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.scopes.default_scopes_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.scopes.linked_account_scopes_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all available permissions for Merge Common Models and fields for a single Linked Account. [Learn more](https://help.merge.dev/en/articles/8828211-common-model-and-field-scopes). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.scopes.linked_account_scopes_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.scopes.linked_account_scopes_create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update permissions for any Common Model or field for a single Linked Account. Any Scopes not set in this POST request will inherit the default Scopes. [Learn more](https://help.merge.dev/en/articles/8828211-common-model-and-field-scopes). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import ( + IndividualCommonModelScopeDeserializerRequest, + ModelPermissionDeserializerRequest, +) + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.scopes.linked_account_scopes_create( + common_models=[ + IndividualCommonModelScopeDeserializerRequest( + model_name="Employee", + model_permissions={ + "READ": ModelPermissionDeserializerRequest( + is_enabled=True, + ), + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ), + }, + ), + IndividualCommonModelScopeDeserializerRequest( + model_name="Benefit", + model_permissions={ + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ) + }, + ), + ], +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**common_models:** `typing.Sequence[IndividualCommonModelScopeDeserializerRequest]` — The common models you want to update the scopes for. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm DeleteAccount +
client.crm.delete_account.delete() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a linked account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.delete_account.delete() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm EngagementTypes +
client.crm.engagement_types.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `EngagementType` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.engagement_types.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.engagement_types.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `EngagementType` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.engagement_types.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.engagement_types.remote_field_classes_list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `RemoteFieldClass` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.engagement_types.remote_field_classes_list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm Engagements +
client.crm.engagements.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Engagement` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.engagements.list() + +``` +
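+
+For example, to pull only recently started engagements, you can combine the `started_after` and `modified_after` datetime filters documented below (a sketch; the 30-day window is arbitrary):
+
+```python
+import datetime as dt
+
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+thirty_days_ago = dt.datetime.now(dt.timezone.utc) - dt.timedelta(days=30)
+recent = client.crm.engagements.list(
+    started_after=thirty_days_ago,   # only engagements started in the last 30 days
+    modified_after=thirty_days_ago,  # only records Merge has synced in that window
+    page_size=50,
+)
+
+```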
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[EngagementsListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**started_after:** `typing.Optional[dt.datetime]` — If provided, will only return engagements started after this datetime. + +
+
+ +
+
+ +**started_before:** `typing.Optional[dt.datetime]` — If provided, will only return engagements started before this datetime. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.engagements.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates an `Engagement` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import EngagementRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.engagements.create( + model=EngagementRequest(), +) + +``` +
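+
+The generated example above sends an empty `EngagementRequest`; in practice you would populate the model. A hedged sketch, assuming `EngagementRequest` exposes `subject`, `content`, and `start_time` fields (check the model definition in `merge.resources.crm` before relying on these names):
+
+```python
+import datetime as dt
+
+from merge.client import Merge
+from merge.resources.crm import EngagementRequest
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+response = client.crm.engagements.create(
+    model=EngagementRequest(
+        subject="Intro call",                      # assumed field name
+        content="Discussed onboarding timeline.",  # assumed field name
+        start_time=dt.datetime(2024, 7, 24, 15, 0, tzinfo=dt.timezone.utc),  # assumed field name
+    ),
+    run_async=True,  # documented flag: run the third-party write asynchronously
+)
+
+```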
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `EngagementRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.engagements.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `Engagement` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.engagements.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[EngagementsRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.engagements.partial_update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Updates an `Engagement` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import PatchedEngagementRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.engagements.partial_update( + id="id", + model=PatchedEngagementRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**model:** `PatchedEngagementRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.engagements.meta_patch_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Engagement` PATCHs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.engagements.meta_patch_retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.engagements.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Engagement` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.engagements.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.engagements.remote_field_classes_list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `RemoteFieldClass` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.engagements.remote_field_classes_list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm FieldMapping +
client.crm.field_mapping.field_mappings_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all Field Mappings for this Linked Account. Field Mappings are mappings between third-party Remote Fields and user-defined Merge fields. [Learn more](https://docs.merge.dev/supplemental-data/field-mappings/overview/). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.field_mapping.field_mappings_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.field_mapping.field_mappings_create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create new Field Mappings that will be available after the next scheduled sync. This will cause the next sync for this Linked Account to sync **ALL** data from start. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.field_mapping.field_mappings_create( + target_field_name="example_target_field_name", + target_field_description="this is an example description of the target field", + remote_field_traversal_path=["example_remote_field"], + remote_method="GET", + remote_url_path="/example-url-path", + common_model_name="ExampleCommonModel", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**target_field_name:** `str` — The name of the target field you want this remote field to map to. + +
+
+ +
+
+ +**target_field_description:** `str` — The description of the target field you want this remote field to map to. + +
+
+ +
+
+ +**remote_field_traversal_path:** `typing.Sequence[typing.Any]` — The field traversal path of the remote field listed when you hit the GET /remote-fields endpoint. + +
+
+ +
+
+ +**remote_method:** `str` — The method of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**remote_url_path:** `str` — The path of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**common_model_name:** `str` — The name of the Common Model that the remote field corresponds to in a given category. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.field_mapping.field_mappings_destroy(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Deletes Field Mappings for a Linked Account. All data related to this Field Mapping will be deleted and these changes will be reflected after the next scheduled sync. This will cause the next sync for this Linked Account to sync **ALL** data from start. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.field_mapping.field_mappings_destroy( + field_mapping_id="field_mapping_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**field_mapping_id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.field_mapping.field_mappings_partial_update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create or update existing Field Mappings for a Linked Account. Changes will be reflected after the next scheduled sync. This will cause the next sync for this Linked Account to sync **ALL** data from start. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.field_mapping.field_mappings_partial_update( + field_mapping_id="field_mapping_id", +) + +``` +
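+
+The optional body fields documented below let you repoint an existing mapping; for example, to change which remote field a mapping reads from (the values here are purely illustrative):
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+client.crm.field_mapping.field_mappings_partial_update(
+    field_mapping_id="field_mapping_id",
+    remote_field_traversal_path=["custom_fields", "example_remote_field"],  # illustrative path
+    remote_method="GET",
+    remote_url_path="/example-url-path",
+)
+
+```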
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**field_mapping_id:** `str` + +
+
+ +
+
+ +**remote_field_traversal_path:** `typing.Optional[typing.Sequence[typing.Any]]` — The field traversal path of the remote field listed when you hit the GET /remote-fields endpoint. + +
+
+ +
+
+ +**remote_method:** `typing.Optional[str]` — The method of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**remote_url_path:** `typing.Optional[str]` — The path of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.field_mapping.remote_fields_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all remote fields for a Linked Account. Remote fields are third-party fields that are accessible after initial sync if remote_data is enabled. You can use remote fields to override existing Merge fields or map a new Merge field. [Learn more](https://docs.merge.dev/supplemental-data/field-mappings/overview/). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.field_mapping.remote_fields_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**common_models:** `typing.Optional[str]` — A comma separated list of Common Model names. If included, will only return Remote Fields for those Common Models. + +
+
+ +
+
+ +**include_example_values:** `typing.Optional[str]` — If true, will include example values, where available, for remote fields in the 3rd party platform. These examples come from active data from your customers. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.field_mapping.target_fields_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all organization-wide Target Fields; this will not include any Linked Account-specific Target Fields. Organization-wide Target Fields are additional fields appended to the Merge Common Model for all Linked Accounts in a category. [Learn more](https://docs.merge.dev/supplemental-data/field-mappings/target-fields/). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.field_mapping.target_fields_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm GenerateKey +
client.crm.generate_key.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a remote key. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.generate_key.create( + name="Remote Deployment Key 1", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**name:** `str` — The name of the remote key + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm Issues +
client.crm.issues.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Gets issues. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.issues.list() + +``` +
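+
+To monitor only unresolved problems, you can filter on `status` and the incident-time parameters below. A sketch, assuming `IssuesListRequestStatus` is importable from `merge.resources.crm` and exposes an `ONGOING` member:
+
+```python
+import datetime as dt
+
+from merge.client import Merge
+from merge.resources.crm import IssuesListRequestStatus  # assumed import path
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+one_week_ago = dt.datetime.now(dt.timezone.utc) - dt.timedelta(days=7)
+ongoing = client.crm.issues.list(
+    status=IssuesListRequestStatus.ONGOING,  # assumed enum member
+    last_incident_time_after=one_week_ago,   # documented datetime filter
+    page_size=25,
+)
+
+```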
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**account_token:** `typing.Optional[str]` + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**end_date:** `typing.Optional[str]` — If included, will only include issues whose most recent action occurred before this time + +
+
+ +
+
+ +**end_user_organization_name:** `typing.Optional[str]` + +
+
+ +
+
+ +**first_incident_time_after:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose first incident time was after this datetime. + +
+
+ +
+
+ +**first_incident_time_before:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose first incident time was before this datetime. + +
+
+ +
+
+ +**include_muted:** `typing.Optional[str]` — If true, will include muted issues + +
+
+ +
+
+ +**integration_name:** `typing.Optional[str]` + +
+
+ +
+
+ +**last_incident_time_after:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose last incident time was after this datetime. + +
+
+ +
+
+ +**last_incident_time_before:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose last incident time was before this datetime. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**start_date:** `typing.Optional[str]` — If included, will only include issues whose most recent action occurred after this time + +
+
+ +
+
+ +**status:** `typing.Optional[IssuesListRequestStatus]` + +Status of the issue. Options: ('ONGOING', 'RESOLVED') + +- `ONGOING` - ONGOING +- `RESOLVED` - RESOLVED + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.issues.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific issue. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.issues.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm Leads +
client.crm.leads.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Lead` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.leads.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**converted_account_id:** `typing.Optional[str]` — If provided, will only return leads with this account. + +
+
+ +
+
+ +**converted_contact_id:** `typing.Optional[str]` — If provided, will only return leads with this contact. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**email_addresses:** `typing.Optional[str]` — If provided, will only return contacts matching the email addresses; multiple email_addresses can be separated by commas. + +
+
+ +
+
+ +**expand:** `typing.Optional[LeadsListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**owner_id:** `typing.Optional[str]` — If provided, will only return leads with this owner. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**phone_numbers:** `typing.Optional[str]` — If provided, will only return contacts matching the phone numbers; multiple phone numbers can be separated by commas. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.leads.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `Lead` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import LeadRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.leads.create( + model=LeadRequest(), +) + +``` +
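+
+As with the other create endpoints, you would normally populate the request model. A hedged sketch, assuming `LeadRequest` exposes `first_name`, `last_name`, `company`, and `lead_source` fields (verify against the generated model before use):
+
+```python
+from merge.client import Merge
+from merge.resources.crm import LeadRequest
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+response = client.crm.leads.create(
+    model=LeadRequest(
+        first_name="Ada",        # assumed field name
+        last_name="Lovelace",    # assumed field name
+        company="Example Corp",  # assumed field name
+        lead_source="Webinar",   # assumed field name
+    ),
+    is_debug_mode=True,  # documented flag: include debug fields in the response
+)
+
+```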
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `LeadRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.leads.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Lead` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.leads.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[LeadsRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.leads.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Lead` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.leads.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.leads.remote_field_classes_list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `RemoteFieldClass` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.leads.remote_field_classes_list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm LinkToken +
client.crm.link_token.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a link token to be used when linking a new end user. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import CategoriesEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.link_token.create( + end_user_email_address="example@gmail.com", + end_user_organization_name="Test Organization", + end_user_origin_id="12345", + categories=[CategoriesEnum.HRIS, CategoriesEnum.ATS], +) + +``` +
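+
+A common variant is generating a Magic Link URL instead of embedding Merge Link directly. This sketch only uses parameters documented below (`should_create_magic_link_url`, `link_expiry_mins`) and assumes `CategoriesEnum.CRM` exists alongside the other category members:
+
+```python
+from merge.client import Merge
+from merge.resources.crm import CategoriesEnum
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+link_token_response = client.crm.link_token.create(
+    end_user_email_address="example@gmail.com",
+    end_user_organization_name="Test Organization",
+    end_user_origin_id="12345",
+    categories=[CategoriesEnum.CRM],    # assumed enum member for the CRM category
+    should_create_magic_link_url=True,  # documented: generate a Magic Link URL
+    link_expiry_mins=10080,             # documented: Magic Link URLs allow up to 10080 minutes
+)
+
+```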
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**end_user_email_address:** `str` — Your end user's email address. This is purely for identification purposes - setting this value will not cause any emails to be sent. + +
+
+ +
+
+ +**end_user_organization_name:** `str` — Your end user's organization. + +
+
+ +
+
+ +**end_user_origin_id:** `str` — This unique identifier typically represents the ID for your end user in your product's database. This value must be distinct from other Linked Accounts' unique identifiers. + +
+
+ +
+
+ +**categories:** `typing.Sequence[CategoriesEnum]` — The integration categories to show in Merge Link. + +
+
+ +
+
+ +**integration:** `typing.Optional[str]` — The slug of a specific pre-selected integration for this linking flow token. For examples of slugs, see https://docs.merge.dev/guides/merge-link/single-integration/. + +
+
+ +
+
+ +**link_expiry_mins:** `typing.Optional[int]` — The number of minutes this token is valid, between 30 and 720 (or up to 10080 for a Magic Link URL). Defaults to 30. + +
+
+ +
+
+ +**should_create_magic_link_url:** `typing.Optional[bool]` — Whether to generate a Magic Link URL. Defaults to false. For more information on Magic Link, see https://merge.dev/blog/integrations-fast-say-hello-to-magic-link. + +
+
+ +
+
+ +**common_models:** `typing.Optional[typing.Sequence[CommonModelScopesBodyRequest]]` — An array of objects to specify the models and fields that will be disabled for a given Linked Account. Each object uses model_id, enabled_actions, and disabled_fields to specify the model, method, and fields that are scoped for a given Linked Account. + +
+
+ +
+
+ +**category_common_model_scopes:** `typing.Optional[ + typing.Dict[ + str, + typing.Optional[ + typing.Sequence[IndividualCommonModelScopeDeserializerRequest] + ], + ] +]` — When creating a Link Token, you can set permissions for Common Models that will apply to the account that is going to be linked. Any model or field not specified in link token payload will default to existing settings. + +
+
+ +
+
+ +**language:** `typing.Optional[str]` — The language code for the language to localize Merge Link to. + +
+
+ +
+
+ +**integration_specific_config:** `typing.Optional[typing.Dict[str, typing.Any]]` — A JSON object containing integration-specific configuration options. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm LinkedAccounts +
client.crm.linked_accounts.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +List linked accounts for your organization. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.linked_accounts.list() + +``` +
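+
+For example, to find fully linked accounts for a specific end user, combine the plain-string filters documented below:
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+accounts = client.crm.linked_accounts.list(
+    status="COMPLETE",                           # documented options: COMPLETE, INCOMPLETE, RELINK_NEEDED
+    end_user_email_address="example@gmail.com",  # only accounts linked by this end user
+    page_size=10,
+)
+
+```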
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**category:** `typing.Optional[LinkedAccountsListRequestCategory]` + +Options: `accounting`, `ats`, `crm`, `filestorage`, `hris`, `mktg`, `ticketing` + +- `hris` - hris +- `ats` - ats +- `accounting` - accounting +- `ticketing` - ticketing +- `crm` - crm +- `mktg` - mktg +- `filestorage` - filestorage + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**end_user_email_address:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given email address. + +
+
+ +
+
+ +**end_user_organization_name:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given organization name. + +
+
+ +
+
+ +**end_user_origin_id:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given origin ID. + +
+
+ +
+
+ +**end_user_origin_ids:** `typing.Optional[str]` — Comma-separated list of EndUser origin IDs, making it possible to specify multiple EndUsers at once. + +
+
+ +
+
+ +**id:** `typing.Optional[str]` + +
+
+ +
+
+ +**ids:** `typing.Optional[str]` — Comma-separated list of LinkedAccount IDs, making it possible to specify multiple LinkedAccounts at once. + +
+
+ +
+
+ +**include_duplicates:** `typing.Optional[bool]` — If `true`, will include complete production duplicates of the account specified by the `id` query parameter in the response. `id` must be for a complete production linked account. + +
+
+ +
+
+ +**integration_name:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given integration name. + +
+
+ +
+
+ +**is_test_account:** `typing.Optional[str]` — If included, will only include test linked accounts. If not included, will only include non-test linked accounts. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**status:** `typing.Optional[str]` — Filter by status. Options: `COMPLETE`, `INCOMPLETE`, `RELINK_NEEDED` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm Notes +
client.crm.notes.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Note` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.notes.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**account_id:** `typing.Optional[str]` — If provided, will only return notes with this account. + +
+
+ +
+
+ +**contact_id:** `typing.Optional[str]` — If provided, will only return notes with this contact. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[NotesListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**opportunity_id:** `typing.Optional[str]` — If provided, will only return notes with this opportunity. + +
+
+ +
+
+ +**owner_id:** `typing.Optional[str]` — If provided, will only return notes with this owner. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.notes.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `Note` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import NoteRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.notes.create( + model=NoteRequest(), +) + +``` +
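+
+A hedged sketch of a populated request, assuming `NoteRequest` exposes a `content` field (verify against the generated model):
+
+```python
+from merge.client import Merge
+from merge.resources.crm import NoteRequest
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+response = client.crm.notes.create(
+    model=NoteRequest(
+        content="Customer asked for a renewal quote by Friday.",  # assumed field name
+    ),
+)
+
+```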
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `NoteRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.notes.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Note` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.notes.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[NotesRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.notes.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Note` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.notes.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.notes.remote_field_classes_list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `RemoteFieldClass` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.notes.remote_field_classes_list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm Opportunities +
client.crm.opportunities.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Opportunity` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.opportunities.list() + +``` +
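+
+To pull only open opportunities for one account, you can combine the filters below. A sketch, assuming `OpportunitiesListRequestStatus` is importable from `merge.resources.crm` with an `OPEN` member; the account ID is a placeholder:
+
+```python
+from merge.client import Merge
+from merge.resources.crm import OpportunitiesListRequestStatus  # assumed import path
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+open_opportunities = client.crm.opportunities.list(
+    status=OpportunitiesListRequestStatus.OPEN,  # assumed enum member
+    account_id="ACCOUNT_ID",                     # placeholder Merge account ID
+    show_enum_origins="status",                  # documented: return the raw third-party status values
+)
+
+```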
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**account_id:** `typing.Optional[str]` — If provided, will only return opportunities with this account. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[OpportunitiesListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**owner_id:** `typing.Optional[str]` — If provided, will only return opportunities with this owner. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["status"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["status"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**stage_id:** `typing.Optional[str]` — If provided, will only return opportunities with this stage. + +
+
+ +
+
+ +**status:** `typing.Optional[OpportunitiesListRequestStatus]` + +If provided, will only return opportunities with this status. Options: ('OPEN', 'WON', 'LOST') + +- `OPEN` - OPEN +- `WON` - WON +- `LOST` - LOST + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.opportunities.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates an `Opportunity` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import OpportunityRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.opportunities.create( + model=OpportunityRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `OpportunityRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.opportunities.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `Opportunity` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.opportunities.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[OpportunitiesRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["status"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["status"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.opportunities.partial_update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Updates an `Opportunity` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import PatchedOpportunityRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.opportunities.partial_update( + id="id", + model=PatchedOpportunityRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**model:** `PatchedOpportunityRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.opportunities.meta_patch_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Opportunity` PATCHs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.opportunities.meta_patch_retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.opportunities.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Opportunity` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.opportunities.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.opportunities.remote_field_classes_list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `RemoteFieldClass` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.opportunities.remote_field_classes_list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm Passthrough +
client.crm.passthrough.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Pull data from an endpoint not currently supported by Merge. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import DataPassthroughRequest, MethodEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), +) + +``` +
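+
+For write operations against unsupported endpoints, the passthrough request can also carry a body and headers. A hedged sketch, assuming `DataPassthroughRequest` accepts `data` (a JSON string) and `headers` fields in addition to the documented `method` and `path`:
+
+```python
+import json
+
+from merge.client import Merge
+from merge.resources.crm import DataPassthroughRequest, MethodEnum
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+response = client.crm.passthrough.create(
+    request=DataPassthroughRequest(
+        method=MethodEnum.POST,                        # assumed enum member, mirroring MethodEnum.GET above
+        path="/scooters",
+        data=json.dumps({"model": "Nimbus 2000"}),     # assumed field: raw request body
+        headers={"Content-Type": "application/json"},  # assumed field: extra request headers
+    ),
+)
+
+```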
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request:** `DataPassthroughRequest` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm RegenerateKey +
client.crm.regenerate_key.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Exchange remote keys. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.regenerate_key.create( + name="Remote Deployment Key 1", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**name:** `str` — The name of the remote key + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm Stages +
client.crm.stages.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Stage` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.stages.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.stages.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Stage` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.stages.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.stages.remote_field_classes_list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `RemoteFieldClass` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.stages.remote_field_classes_list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm SyncStatus +
client.crm.sync_status.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get syncing status. Possible values: `DISABLED`, `DONE`, `FAILED`, `PARTIALLY_SYNCED`, `PAUSED`, `SYNCING`. Learn more about sync status in our [Help Center](https://help.merge.dev/en/articles/8184193-merge-sync-statuses). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.sync_status.list() + +``` +
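+
+A small sketch that prints the per-model status, assuming each returned `SyncStatus` exposes `model_name` and `status` fields as in Merge's API payload:
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+page = client.crm.sync_status.list(page_size=100)
+for sync_status in page.results or []:
+    # status is one of DISABLED, DONE, FAILED, PARTIALLY_SYNCED, PAUSED, SYNCING
+    print(sync_status.model_name, sync_status.status)  # assumed field names
+
+```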
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm ForceResync +
client.crm.force_resync.sync_status_resync_create() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Force re-sync of all models. This is available for all organizations via the dashboard. Force re-sync is also available programmatically via API for monthly, quarterly, and highest sync frequency customers on the Launch, Professional, or Enterprise plans. Doing so will consume a sync credit for the relevant linked account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.force_resync.sync_status_resync_create() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm Tasks +
client.crm.tasks.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Task` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.tasks.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[TasksListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.tasks.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `Task` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import TaskRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.tasks.create( + model=TaskRequest(), +) + +``` +
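+
+A hedged sketch of a populated request, assuming `TaskRequest` exposes `subject`, `content`, and `due_date` fields (verify against the generated model):
+
+```python
+import datetime as dt
+
+from merge.client import Merge
+from merge.resources.crm import TaskRequest
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+response = client.crm.tasks.create(
+    model=TaskRequest(
+        subject="Send renewal quote",            # assumed field name
+        content="Include updated seat counts.",  # assumed field name
+        due_date=dt.datetime(2024, 8, 1, tzinfo=dt.timezone.utc),  # assumed field name
+    ),
+)
+
+```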
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `TaskRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.tasks.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Task` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.tasks.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[TasksRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.tasks.partial_update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Updates a `Task` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import PatchedTaskRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.tasks.partial_update( + id="id", + model=PatchedTaskRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**model:** `PatchedTaskRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.tasks.meta_patch_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Task` PATCHs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.tasks.meta_patch_retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.tasks.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Task` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.tasks.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.tasks.remote_field_classes_list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `RemoteFieldClass` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.tasks.remote_field_classes_list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
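+
+A sketch of cursor-based pagination using the `cursor` and `page_size` parameters documented above. It assumes the paginated response exposes `results` and `next` attributes, which is typical for Merge's paginated list types but not guaranteed by this reference:
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+cursor = None
+while True:
+    page = client.crm.tasks.remote_field_classes_list(cursor=cursor, page_size=50)
+    for remote_field_class in page.results or []:  # assumed attribute name
+        print(remote_field_class)
+    cursor = page.next  # assumed attribute name
+    if not cursor:
+        break
+```
+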
+
+
+ +## Crm Users +
client.crm.users.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `User` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.users.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
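+
+The optional filters documented above take plain `datetime` objects and integers. An illustrative sketch (the date and page size are placeholders):
+
+```python
+import datetime
+
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+# Only users synced by Merge since the start of 2024, 25 per page.
+client.crm.users.list(
+    modified_after=datetime.datetime(2024, 1, 1),
+    page_size=25,
+)
+```
+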
+
+
+ +
client.crm.users.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `User` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.users.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.users.ignore_create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Ignores a specific row based on the `model_id` in the URL. These records will have their properties set to null, and will not be updated in future syncs. The "reason" and "message" fields in the request body will be stored for audit purposes. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.crm import IgnoreCommonModelRequest, ReasonEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.users.ignore_create( + model_id="model_id", + request=IgnoreCommonModelRequest( + reason=ReasonEnum.GENERAL_CUSTOMER_REQUEST, + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model_id:** `str` + +
+
+ +
+
+ +**request:** `IgnoreCommonModelRequest` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
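+
+The description above mentions that the request body's "reason" and "message" fields are stored for audit purposes. An illustrative sketch; the `message` keyword is assumed to be an optional field on `IgnoreCommonModelRequest`:
+
+```python
+from merge.client import Merge
+from merge.resources.crm import IgnoreCommonModelRequest, ReasonEnum
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+client.crm.users.ignore_create(
+    model_id="model_id",
+    request=IgnoreCommonModelRequest(
+        reason=ReasonEnum.GENERAL_CUSTOMER_REQUEST,
+        # Assumed field name, per the "message" field mentioned in the description above.
+        message="Customer asked for this user to be removed",
+    ),
+)
+```
+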
+
+
+ +
client.crm.users.remote_field_classes_list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `RemoteFieldClass` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.users.remote_field_classes_list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Crm WebhookReceivers +
client.crm.webhook_receivers.list() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `WebhookReceiver` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.webhook_receivers.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.crm.webhook_receivers.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `WebhookReceiver` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.crm.webhook_receivers.create( + event="event", + is_active=True, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**event:** `str` + +
+
+ +
+
+ +**is_active:** `bool` + +
+
+ +
+
+ +**key:** `typing.Optional[str]` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
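+
+The optional `key` parameter documented above can be supplied alongside the required fields. Illustrative values only:
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+client.crm.webhook_receivers.create(
+    event="event",
+    is_active=True,
+    key="your-signing-key",  # placeholder value
+)
+```
+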
+
+
+ +## Ticketing AccountDetails +
client.ticketing.account_details.retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get details for a linked account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.account_details.retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing AccountToken +
client.ticketing.account_token.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns the account token for the end user with the provided public token. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.account_token.retrieve( + public_token="public_token", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**public_token:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing Accounts +
client.ticketing.accounts.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Account` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.accounts.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.accounts.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `Account` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.accounts.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing AsyncPassthrough +
client.ticketing.async_passthrough.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Asynchronously pull data from an endpoint not currently supported by Merge. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ticketing import DataPassthroughRequest, MethodEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.async_passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request:** `DataPassthroughRequest` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.async_passthrough.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Retrieves data from an earlier async-passthrough POST request. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.async_passthrough.retrieve( + async_passthrough_receipt_id="async_passthrough_receipt_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**async_passthrough_receipt_id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing Attachments +
client.ticketing.attachments.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Attachment` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.attachments.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["ticket"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_created_after:** `typing.Optional[dt.datetime]` — If provided, will only return attachments created in the third party platform after this datetime. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**ticket_id:** `typing.Optional[str]` — If provided, will only return attachments for this ticket. + 
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
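+
+A sketch combining the filters documented above to fetch the attachments of a single ticket; the ticket ID and date are placeholders:
+
+```python
+import datetime
+
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+# Attachments for one ticket, created in the third-party platform since mid-2024.
+client.ticketing.attachments.list(
+    ticket_id="ticket_id",
+    remote_created_after=datetime.datetime(2024, 6, 1),
+    expand="ticket",
+)
+```
+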
+
+
+ +
client.ticketing.attachments.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates an `Attachment` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ticketing import AttachmentRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.attachments.create( + model=AttachmentRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `AttachmentRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.attachments.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `Attachment` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.attachments.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["ticket"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.attachments.download_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Downloads the file content of an `Attachment` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.attachments.download_retrieve( + id="string", + mime_type="string", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**mime_type:** `typing.Optional[str]` — If provided, specifies the export format of the file to be downloaded. For information on supported export formats, please refer to our export format help center article. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.attachments.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `TicketingAttachment` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.attachments.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing AuditTrail +
client.ticketing.audit_trail.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Gets a list of audit trail events. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.audit_trail.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**end_date:** `typing.Optional[str]` — If included, will only include audit trail events that occurred before this time. + 
+
+ +
+
+ +**event_type:** `typing.Optional[str]` — If included, will only include events with the given event type. Possible values include: `CREATED_REMOTE_PRODUCTION_API_KEY`, `DELETED_REMOTE_PRODUCTION_API_KEY`, `CREATED_TEST_API_KEY`, `DELETED_TEST_API_KEY`, `REGENERATED_PRODUCTION_API_KEY`, `INVITED_USER`, `TWO_FACTOR_AUTH_ENABLED`, `TWO_FACTOR_AUTH_DISABLED`, `DELETED_LINKED_ACCOUNT`, `CREATED_DESTINATION`, `DELETED_DESTINATION`, `CHANGED_DESTINATION`, `CHANGED_SCOPES`, `CHANGED_PERSONAL_INFORMATION`, `CHANGED_ORGANIZATION_SETTINGS`, `ENABLED_INTEGRATION`, `DISABLED_INTEGRATION`, `ENABLED_CATEGORY`, `DISABLED_CATEGORY`, `CHANGED_PASSWORD`, `RESET_PASSWORD`, `ENABLED_REDACT_UNMAPPED_DATA_FOR_ORGANIZATION`, `ENABLED_REDACT_UNMAPPED_DATA_FOR_LINKED_ACCOUNT`, `DISABLED_REDACT_UNMAPPED_DATA_FOR_ORGANIZATION`, `DISABLED_REDACT_UNMAPPED_DATA_FOR_LINKED_ACCOUNT`, `CREATED_INTEGRATION_WIDE_FIELD_MAPPING`, `CREATED_LINKED_ACCOUNT_FIELD_MAPPING`, `CHANGED_INTEGRATION_WIDE_FIELD_MAPPING`, `CHANGED_LINKED_ACCOUNT_FIELD_MAPPING`, `DELETED_INTEGRATION_WIDE_FIELD_MAPPING`, `DELETED_LINKED_ACCOUNT_FIELD_MAPPING`, `FORCED_LINKED_ACCOUNT_RESYNC`, `MUTED_ISSUE`, `GENERATED_MAGIC_LINK`, `ENABLED_MERGE_WEBHOOK`, `DISABLED_MERGE_WEBHOOK`, `MERGE_WEBHOOK_TARGET_CHANGED` + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**start_date:** `typing.Optional[str]` — If included, will only include audit trail events that occurred after this time. + 
+
+ +
+
+ +**user_email:** `typing.Optional[str]` — If provided, this will return events associated with the specified user email. Please note that the email address reflects the user's email at the time of the event, and may not be their current email. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
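+
+A sketch combining the filters documented above. `start_date` and `end_date` are plain strings in this reference; ISO 8601 timestamps are assumed here, and the event type comes from the list of possible values above:
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+# Linked-account deletions in a given window; the dates are illustrative ISO 8601 strings.
+client.ticketing.audit_trail.list(
+    event_type="DELETED_LINKED_ACCOUNT",
+    start_date="2024-01-01T00:00:00Z",
+    end_date="2024-06-30T23:59:59Z",
+    page_size=100,
+)
+```
+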
+
+
+ +## Ticketing AvailableActions +
client.ticketing.available_actions.retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of models and actions available for an account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.available_actions.retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing Collections +
client.ticketing.collections.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Collection` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.collections.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**collection_type:** `typing.Optional[str]` — If provided, will only return collections of the given type. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["parent_collection"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**parent_collection_id:** `typing.Optional[str]` — If provided, will only return collections whose parent collection matches the given id. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["collection_type"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["collection_type"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
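+
+A sketch using the filters documented above; the collection type value is a placeholder, while `expand="parent_collection"` matches the literal accepted by this endpoint:
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+client.ticketing.collections.list(
+    collection_type="PROJECT",  # placeholder; use the collection type you need
+    expand="parent_collection",
+    page_size=50,
+)
+```
+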
+
+
+ +
client.ticketing.collections.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Collection` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.collections.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["parent_collection"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["collection_type"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["collection_type"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.collections.users_list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `User` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.collections.users_list( + parent_id="parent_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**parent_id:** `str` + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[CollectionsUsersListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing Comments +
client.ticketing.comments.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Comment` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.comments.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[CommentsListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_created_after:** `typing.Optional[dt.datetime]` — If provided, will only return Comments created in the third party platform after this datetime. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**ticket_id:** `typing.Optional[str]` — If provided, will only return comments for this ticket. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
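+
+A sketch fetching the comments of a single ticket using the filters documented above; the ticket ID and date are placeholders:
+
+```python
+import datetime
+
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+# Comments on a single ticket, created in the third-party platform since March 2024.
+client.ticketing.comments.list(
+    ticket_id="ticket_id",
+    remote_created_after=datetime.datetime(2024, 3, 1),
+)
+```
+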
+
+
+ +
client.ticketing.comments.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `Comment` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ticketing import CommentRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.comments.create( + model=CommentRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `CommentRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.comments.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Comment` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.comments.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[CommentsRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.comments.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Comment` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.comments.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing Contacts +
client.ticketing.contacts.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Contact` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.contacts.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["account"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.contacts.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `Contact` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ticketing import ContactRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.contacts.create( + model=ContactRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `ContactRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.contacts.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Contact` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.contacts.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["account"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.contacts.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `TicketingContact` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.contacts.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing Scopes +
client.ticketing.scopes.default_scopes_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get the default permissions for Merge Common Models and fields across all Linked Accounts of a given category. [Learn more](https://help.merge.dev/en/articles/8828211-common-model-and-field-scopes). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.scopes.default_scopes_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.scopes.linked_account_scopes_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all available permissions for Merge Common Models and fields for a single Linked Account. [Learn more](https://help.merge.dev/en/articles/8828211-common-model-and-field-scopes). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.scopes.linked_account_scopes_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.scopes.linked_account_scopes_create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update permissions for any Common Model or field for a single Linked Account. Any Scopes not set in this POST request will inherit the default Scopes. [Learn more](https://help.merge.dev/en/articles/8828211-common-model-and-field-scopes) +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ticketing import ( + IndividualCommonModelScopeDeserializerRequest, + ModelPermissionDeserializerRequest, +) + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.scopes.linked_account_scopes_create( + common_models=[ + IndividualCommonModelScopeDeserializerRequest( + model_name="Employee", + model_permissions={ + "READ": ModelPermissionDeserializerRequest( + is_enabled=True, + ), + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ), + }, + ), + IndividualCommonModelScopeDeserializerRequest( + model_name="Benefit", + model_permissions={ + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ) + }, + ), + ], +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**common_models:** `typing.Sequence[IndividualCommonModelScopeDeserializerRequest]` — The common models you want to update the scopes for + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing DeleteAccount +
client.ticketing.delete_account.delete() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a linked account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.delete_account.delete() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing FieldMapping +
client.ticketing.field_mapping.field_mappings_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all Field Mappings for this Linked Account. Field Mappings are mappings between third-party Remote Fields and user defined Merge fields. [Learn more](https://docs.merge.dev/supplemental-data/field-mappings/overview/). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.field_mapping.field_mappings_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.field_mapping.field_mappings_create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create new Field Mappings that will be available after the next scheduled sync. This will cause the next sync for this Linked Account to sync **ALL** data from start. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.field_mapping.field_mappings_create( + target_field_name="example_target_field_name", + target_field_description="this is an example description of the target field", + remote_field_traversal_path=["example_remote_field"], + remote_method="GET", + remote_url_path="/example-url-path", + common_model_name="ExampleCommonModel", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**target_field_name:** `str` — The name of the target field you want this remote field to map to. + +
+
+ +
+
+ +**target_field_description:** `str` — The description of the target field you want this remote field to map to. + +
+
+ +
+
+ +**remote_field_traversal_path:** `typing.Sequence[typing.Any]` — The field traversal path of the remote field listed when you hit the GET /remote-fields endpoint. + +
+
+ +
+
+ +**remote_method:** `str` — The method of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**remote_url_path:** `str` — The path of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**common_model_name:** `str` — The name of the Common Model that the remote field corresponds to in a given category. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.field_mapping.field_mappings_destroy(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Deletes Field Mappings for a Linked Account. All data related to this Field Mapping will be deleted and these changes will be reflected after the next scheduled sync. This will cause the next sync for this Linked Account to sync **ALL** data from start. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.field_mapping.field_mappings_destroy( + field_mapping_id="field_mapping_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**field_mapping_id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.field_mapping.field_mappings_partial_update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create or update existing Field Mappings for a Linked Account. Changes will be reflected after the next scheduled sync. This will cause the next sync for this Linked Account to sync **ALL** data from start. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.field_mapping.field_mappings_partial_update( + field_mapping_id="field_mapping_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**field_mapping_id:** `str` + +
+
+ +
+
+ +**remote_field_traversal_path:** `typing.Optional[typing.Sequence[typing.Any]]` — The field traversal path of the remote field listed when you hit the GET /remote-fields endpoint. + +
+
+ +
+
+ +**remote_method:** `typing.Optional[str]` — The method of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**remote_url_path:** `typing.Optional[str]` — The path of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
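+
+The optional remote-field parameters documented above can be sent together in one call. Illustrative values, mirroring the `field_mappings_create` example:
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+client.ticketing.field_mapping.field_mappings_partial_update(
+    field_mapping_id="field_mapping_id",
+    remote_field_traversal_path=["example_remote_field"],  # illustrative path
+    remote_method="GET",
+    remote_url_path="/example-url-path",
+)
+```
+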
+
+
+ +
client.ticketing.field_mapping.remote_fields_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all remote fields for a Linked Account. Remote fields are third-party fields that are accessible after initial sync if remote_data is enabled. You can use remote fields to override existing Merge fields or map a new Merge field. [Learn more](https://docs.merge.dev/supplemental-data/field-mappings/overview/). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.field_mapping.remote_fields_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**common_models:** `typing.Optional[str]` — A comma separated list of Common Model names. If included, will only return Remote Fields for those Common Models. + 
+
+ +
+
+ +**include_example_values:** `typing.Optional[str]` — If true, will include example values, where available, for remote fields in the 3rd party platform. These examples come from active data from your customers. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
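+
+A sketch using the query parameters documented above; the Common Model names are illustrative, and `include_example_values` is passed as the string `"true"` because this reference types it as `str`:
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+# Remote fields for two Common Models, with example values where available.
+client.ticketing.field_mapping.remote_fields_retrieve(
+    common_models="Ticket,Comment",  # comma separated list; names are illustrative
+    include_example_values="true",
+)
+```
+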
+
+
+ +
client.ticketing.field_mapping.target_fields_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all organization-wide Target Fields, this will not include any Linked Account specific Target Fields. Organization-wide Target Fields are additional fields appended to the Merge Common Model for all Linked Accounts in a category. [Learn more](https://docs.merge.dev/supplemental-data/field-mappings/target-fields/). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.field_mapping.target_fields_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing GenerateKey +
client.ticketing.generate_key.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a remote key. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.generate_key.create( + name="Remote Deployment Key 1", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**name:** `str` — The name of the remote key + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing Issues +
client.ticketing.issues.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Gets issues. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.issues.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**account_token:** `typing.Optional[str]` + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**end_date:** `typing.Optional[str]` — If included, will only include issues whose most recent action occurred before this time. + 
+
+ +
+
+ +**end_user_organization_name:** `typing.Optional[str]` + +
+
+ +
+
+ +**first_incident_time_after:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose first incident time was after this datetime. + +
+
+ +
+
+ +**first_incident_time_before:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose first incident time was before this datetime. + +
+
+ +
+
+ +**include_muted:** `typing.Optional[str]` — If true, will include muted issues. + 
+
+ +
+
+ +**integration_name:** `typing.Optional[str]` + +
+
+ +
+
+ +**last_incident_time_after:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose last incident time was after this datetime. + +
+
+ +
+
+ +**last_incident_time_before:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose last incident time was before this datetime. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**start_date:** `typing.Optional[str]` — If included, will only include issues whose most recent action occurred after this time. + 
+
+ +
+
+ +**status:** `typing.Optional[IssuesListRequestStatus]` + +Status of the issue. Options: ('ONGOING', 'RESOLVED') + +- `ONGOING` - ONGOING +- `RESOLVED` - RESOLVED + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
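+
+A sketch combining the filters documented above; the date is a placeholder, and `include_muted` is passed as the string `"true"` because this reference types it as `str`:
+
+```python
+import datetime
+
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+# Issues whose first incident happened this year, including muted ones.
+client.ticketing.issues.list(
+    first_incident_time_after=datetime.datetime(2024, 1, 1),
+    include_muted="true",  # assumed spelling for the string flag
+    page_size=50,
+)
+```
+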
+
+
+ +
client.ticketing.issues.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific issue. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.issues.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing LinkToken +
client.ticketing.link_token.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a link token to be used when linking a new end user. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ticketing import CategoriesEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.link_token.create( + end_user_email_address="example@gmail.com", + end_user_organization_name="Test Organization", + end_user_origin_id="12345", + categories=[CategoriesEnum.HRIS, CategoriesEnum.ATS], +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**end_user_email_address:** `str` — Your end user's email address. This is purely for identification purposes - setting this value will not cause any emails to be sent. + +
+
+ +
+
+ +**end_user_organization_name:** `str` — Your end user's organization. + +
+
+ +
+
+ +**end_user_origin_id:** `str` — This unique identifier typically represents the ID for your end user in your product's database. This value must be distinct from other Linked Accounts' unique identifiers. + +
+
+ +
+
+ +**categories:** `typing.Sequence[CategoriesEnum]` — The integration categories to show in Merge Link. + +
+
+ +
+
+ +**integration:** `typing.Optional[str]` — The slug of a specific pre-selected integration for this linking flow token. For examples of slugs, see https://docs.merge.dev/guides/merge-link/single-integration/. + +
+
+ +
+
+ +**link_expiry_mins:** `typing.Optional[int]` — An integer number of minutes, between 30 and 720 (or up to 10080 when generating a Magic Link URL), for how long this token is valid. Defaults to 30. + 
+
+ +
+
+ +**should_create_magic_link_url:** `typing.Optional[bool]` — Whether to generate a Magic Link URL. Defaults to false. For more information on Magic Link, see https://merge.dev/blog/integrations-fast-say-hello-to-magic-link. + +
+
+ +
+
+ +**common_models:** `typing.Optional[typing.Sequence[CommonModelScopesBodyRequest]]` — An array of objects to specify the models and fields that will be disabled for a given Linked Account. Each object uses model_id, enabled_actions, and disabled_fields to specify the model, method, and fields that are scoped for a given Linked Account. + +
+
+ +
+
+ +**category_common_model_scopes:** `typing.Optional[ + typing.Dict[ + str, + typing.Optional[ + typing.Sequence[IndividualCommonModelScopeDeserializerRequest] + ], + ] +]` — When creating a Link Token, you can set permissions for Common Models that will apply to the account that is going to be linked. Any model or field not specified in link token payload will default to existing settings. + +
+
+ +
+
+ +**language:** `typing.Optional[str]` — The language code for the language to localize Merge Link to. + +
+
+ +
+
+ +**integration_specific_config:** `typing.Optional[typing.Dict[str, typing.Any]]` — A JSON object containing integration-specific configuration options. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
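+
+A sketch requesting a Magic Link URL with a longer expiry, using the parameters documented above. `CategoriesEnum.TICKETING` is assumed to exist alongside the `HRIS` and `ATS` members used in the usage example:
+
+```python
+from merge.client import Merge
+from merge.resources.ticketing import CategoriesEnum
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+# A Magic Link URL that stays valid for 7 days (10080 minutes).
+client.ticketing.link_token.create(
+    end_user_email_address="example@gmail.com",
+    end_user_organization_name="Test Organization",
+    end_user_origin_id="12345",
+    categories=[CategoriesEnum.TICKETING],  # assumed enum member
+    should_create_magic_link_url=True,
+    link_expiry_mins=10080,
+)
+```
+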
+
+
+ +## Ticketing LinkedAccounts +
client.ticketing.linked_accounts.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +List linked accounts for your organization. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.linked_accounts.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**category:** `typing.Optional[LinkedAccountsListRequestCategory]` + +Options: ('hris', 'ats', 'accounting', 'ticketing', 'crm', 'mktg', 'filestorage') + +- `hris` - hris +- `ats` - ats +- `accounting` - accounting +- `ticketing` - ticketing +- `crm` - crm +- `mktg` - mktg +- `filestorage` - filestorage + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**end_user_email_address:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given email address. + +
+
+ +
+
+ +**end_user_organization_name:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given organization name. + +
+
+ +
+
+ +**end_user_origin_id:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given origin ID. + +
+
+ +
+
+ +**end_user_origin_ids:** `typing.Optional[str]` — Comma-separated list of EndUser origin IDs, making it possible to specify multiple EndUsers at once. + +
+
+ +
+
+ +**id:** `typing.Optional[str]` + +
+
+ +
+
+ +**ids:** `typing.Optional[str]` — Comma-separated list of LinkedAccount IDs, making it possible to specify multiple LinkedAccounts at once. + +
+
+ +
+
+ +**include_duplicates:** `typing.Optional[bool]` — If `true`, will include complete production duplicates of the account specified by the `id` query parameter in the response. `id` must be for a complete production linked account. + +
+
+ +
+
+ +**integration_name:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given integration name. + +
+
+ +
+
+ +**is_test_account:** `typing.Optional[str]` — If included, will only include test linked accounts. If not included, will only include non-test linked accounts. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**status:** `typing.Optional[str]` — Filter by status. Options: `COMPLETE`, `INCOMPLETE`, `RELINK_NEEDED` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
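+
+A sketch using the filters documented above; `status="COMPLETE"` is one of the documented options and the email address is a placeholder:
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+# Fully linked accounts for a single end user.
+client.ticketing.linked_accounts.list(
+    status="COMPLETE",
+    end_user_email_address="example@gmail.com",  # placeholder address
+)
+```
+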
+
+
+ +## Ticketing Passthrough +
client.ticketing.passthrough.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Pull data from an endpoint not currently supported by Merge. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ticketing import DataPassthroughRequest, MethodEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request:** `DataPassthroughRequest` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing Projects +
client.ticketing.projects.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Project` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.projects.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.projects.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Project` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.projects.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.projects.users_list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `User` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.projects.users_list( + parent_id="parent_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**parent_id:** `str` + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[ProjectsUsersListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing RegenerateKey +
client.ticketing.regenerate_key.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Exchange remote keys. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.regenerate_key.create( + name="Remote Deployment Key 1", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**name:** `str` — The name of the remote key + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing Roles +
client.ticketing.roles.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Role` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.roles.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.roles.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Role` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.roles.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing SyncStatus +
client.ticketing.sync_status.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get syncing status. Possible values: `DISABLED`, `DONE`, `FAILED`, `PARTIALLY_SYNCED`, `PAUSED`, `SYNCING`. Learn more about sync status in our [Help Center](https://help.merge.dev/en/articles/8184193-merge-sync-statuses). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.sync_status.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing ForceResync +
client.ticketing.force_resync.sync_status_resync_create() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Force re-sync of all models. This is available for all organizations via the dashboard. Force re-sync is also available programmatically via API for monthly, quarterly, and highest sync frequency customers on the Launch, Professional, or Enterprise plans. Doing so will consume a sync credit for the relevant linked account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.force_resync.sync_status_resync_create() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing Tags +
client.ticketing.tags.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Tag` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.tags.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.tags.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Tag` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.tags.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing Teams +
client.ticketing.teams.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Team` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.teams.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.teams.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Team` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.teams.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing Tickets +
client.ticketing.tickets.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Ticket` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.tickets.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**account_id:** `typing.Optional[str]` — If provided, will only return tickets for this account. + +
+
+ +
+
+ +**assignee_ids:** `typing.Optional[str]` — If provided, will only return tickets assigned to the assignee_ids; multiple assignee_ids can be separated by commas. + +
+
+ +
+
+ +**collection_ids:** `typing.Optional[str]` — If provided, will only return tickets assigned to the collection_ids; multiple collection_ids can be separated by commas. + +
+
+ +
+
+ +**completed_after:** `typing.Optional[dt.datetime]` — If provided, will only return tickets completed after this datetime. + +
+
+ +
+
+ +**completed_before:** `typing.Optional[dt.datetime]` — If provided, will only return tickets completed before this datetime. + +
+
+ +
+
+ +**contact_id:** `typing.Optional[str]` — If provided, will only return tickets for this contact. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**due_after:** `typing.Optional[dt.datetime]` — If provided, will only return tickets due after this datetime. + +
+
+ +
+
+ +**due_before:** `typing.Optional[dt.datetime]` — If provided, will only return tickets due before this datetime. + +
+
+ +
+
+ +**expand:** `typing.Optional[TicketsListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**parent_ticket_id:** `typing.Optional[str]` — If provided, will only return sub-tickets of the parent_ticket_id. + +
+
+ +
+
+ +**priority:** `typing.Optional[TicketsListRequestPriority]` + +If provided, will only return tickets of this priority. + +- `URGENT` - URGENT +- `HIGH` - HIGH +- `NORMAL` - NORMAL +- `LOW` - LOW + +
+
+ +
+
+ +**remote_created_after:** `typing.Optional[dt.datetime]` — If provided, will only return tickets created in the third party platform after this datetime. + +
+
+ +
+
+ +**remote_created_before:** `typing.Optional[dt.datetime]` — If provided, will only return tickets created in the third party platform before this datetime. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[TicketsListRequestRemoteFields]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**remote_updated_after:** `typing.Optional[dt.datetime]` — If provided, will only return tickets updated in the third party platform after this datetime. + +
+
+ +
+
+ +**remote_updated_before:** `typing.Optional[dt.datetime]` — If provided, will only return tickets updated in the third party platform before this datetime. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[TicketsListRequestShowEnumOrigins]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**status:** `typing.Optional[TicketsListRequestStatus]` + +If provided, will only return tickets of this status. + +- `OPEN` - OPEN +- `CLOSED` - CLOSED +- `IN_PROGRESS` - IN_PROGRESS +- `ON_HOLD` - ON_HOLD + +
+
+ +
+
+ +**tags:** `typing.Optional[str]` — If provided, will only return tickets matching the tags; multiple tags can be separated by commas. + +
+
+ +
+
+ +**ticket_type:** `typing.Optional[str]` — If provided, will only return tickets of this type. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
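+
+The `client.ticketing.tickets.list` call above accepts the filters documented in its parameter list and paginates with `cursor`/`page_size`. A minimal sketch combining a date filter with cursor pagination follows; it assumes the returned page exposes `results` and `next`, in line with Merge's paginated responses.
+
+```python
+import datetime as dt
+
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+# List tickets modified in the last 7 days, 50 per page.
+page = client.ticketing.tickets.list(
+    modified_after=dt.datetime.now(dt.timezone.utc) - dt.timedelta(days=7),
+    page_size=50,
+)
+
+# Assumption: the paginated response exposes `results` and `next`,
+# mirroring Merge's REST pagination; `next` is passed back as `cursor`.
+while page is not None:
+    for ticket in page.results or []:
+        print(ticket.id)
+    page = (
+        client.ticketing.tickets.list(cursor=page.next, page_size=50)
+        if page.next
+        else None
+    )
+```
+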
+ +
client.ticketing.tickets.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `Ticket` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ticketing import TicketRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.tickets.create( + model=TicketRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `TicketRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
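+
+A slightly fuller sketch of `tickets.create` is shown below, populating a few fields that are assumed to exist on `TicketRequest` (`name`, `description`, `due_date`) based on Merge's Ticket model; treat the field names as assumptions rather than a definitive schema.
+
+```python
+import datetime as dt
+
+from merge.client import Merge
+from merge.resources.ticketing import TicketRequest
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+# Hedged sketch: `name`, `description`, and `due_date` are assumed
+# TicketRequest fields taken from Merge's Ticket model.
+client.ticketing.tickets.create(
+    model=TicketRequest(
+        name="Please fix the scooter API",
+        description="GET /scooters intermittently returns 500s.",
+        due_date=dt.datetime(2024, 9, 1, tzinfo=dt.timezone.utc),
+    ),
+)
+```
+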
+ +
client.ticketing.tickets.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Ticket` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.tickets.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[TicketsRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**include_remote_fields:** `typing.Optional[bool]` — Whether to include all remote fields, including fields that Merge did not map to common models, in a normalized format. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[TicketsRetrieveRequestRemoteFields]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[TicketsRetrieveRequestShowEnumOrigins]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.tickets.partial_update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Updates a `Ticket` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.ticketing import PatchedTicketRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.tickets.partial_update( + id="id", + model=PatchedTicketRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**model:** `PatchedTicketRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.tickets.collaborators_list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `User` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.tickets.collaborators_list( + parent_id="parent_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**parent_id:** `str` + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[TicketsCollaboratorsListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.tickets.meta_patch_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Ticket` PATCHs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.tickets.meta_patch_retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.tickets.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Ticket` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.tickets.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
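+
+One hedged way to use this endpoint is to inspect the POST metadata before building a `TicketRequest`; the `request_schema` attribute on the returned metadata object is an assumption here, not something the snippet above confirms.
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+meta = client.ticketing.tickets.meta_post_retrieve()
+
+# Assumption: `request_schema` holds the JSON schema describing which
+# TicketRequest fields this linked account requires for POSTs.
+print(meta.request_schema)
+```
+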
+ +
client.ticketing.tickets.remote_field_classes_list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `RemoteFieldClass` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.tickets.remote_field_classes_list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing Users +
client.ticketing.users.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `User` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.users.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**email_address:** `typing.Optional[str]` — If provided, will only return users with emails equal to this value (case insensitive). + +
+
+ +
+
+ +**expand:** `typing.Optional[UsersListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.users.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `User` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.users.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[UsersRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Ticketing WebhookReceivers +
client.ticketing.webhook_receivers.list() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `WebhookReceiver` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.webhook_receivers.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.ticketing.webhook_receivers.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `WebhookReceiver` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.ticketing.webhook_receivers.create( + event="event", + is_active=True, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**event:** `str` + +
+
+ +
+
+ +**is_active:** `bool` + +
+
+ +
+
+ +**key:** `typing.Optional[str]` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting AccountDetails +
client.accounting.account_details.retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get details for a linked account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.account_details.retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting AccountToken +
client.accounting.account_token.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns the account token for the end user with the provided public token. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.account_token.retrieve( + public_token="public_token", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**public_token:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting AccountingPeriods +
client.accounting.accounting_periods.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `AccountingPeriod` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.accounting_periods.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.accounting_periods.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `AccountingPeriod` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.accounting_periods.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting Accounts +
client.accounting.accounts.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Account` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.accounts.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**company_id:** `typing.Optional[str]` — If provided, will only return accounts for this company. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["company"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[AccountsListRequestRemoteFields]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[AccountsListRequestShowEnumOrigins]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
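+
+A short sketch combining a few of the documented parameters above, expanding the linked `company` relation and including remote data alongside a smaller page size:
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+# Expand the linked company and include the raw third-party payloads,
+# 25 accounts per page.
+accounts_page = client.accounting.accounts.list(
+    expand="company",
+    include_remote_data=True,
+    page_size=25,
+)
+```
+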
+ +
client.accounting.accounts.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates an `Account` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.accounting import AccountRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.accounts.create( + model=AccountRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `AccountRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.accounts.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `Account` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.accounts.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["company"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[AccountsRetrieveRequestRemoteFields]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[AccountsRetrieveRequestShowEnumOrigins]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.accounts.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Account` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.accounts.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting Addresses +
client.accounting.addresses.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `Address` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.addresses.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["type"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["type"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting AsyncPassthrough +
client.accounting.async_passthrough.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Asynchronously pull data from an endpoint not currently supported by Merge. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.accounting import DataPassthroughRequest, MethodEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.async_passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request:** `DataPassthroughRequest` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.async_passthrough.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Retrieves data from an earlier async-passthrough POST request. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.async_passthrough.retrieve( + async_passthrough_receipt_id="async_passthrough_receipt_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**async_passthrough_receipt_id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
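+
+The two endpoints above are typically used together: `create` starts the passthrough and returns a receipt, and `retrieve` fetches the result. A hedged sketch follows, assuming the receipt object exposes `async_passthrough_receipt_id` to match the parameter name on `retrieve`.
+
+```python
+from merge.client import Merge
+from merge.resources.accounting import DataPassthroughRequest, MethodEnum
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+# Kick off the async passthrough and capture the receipt.
+receipt = client.accounting.async_passthrough.create(
+    request=DataPassthroughRequest(
+        method=MethodEnum.GET,
+        path="/scooters",
+    ),
+)
+
+# Assumption: the receipt exposes `async_passthrough_receipt_id`,
+# matching the parameter name on `retrieve` above.
+result = client.accounting.async_passthrough.retrieve(
+    async_passthrough_receipt_id=receipt.async_passthrough_receipt_id,
+)
+```
+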
+ +## Accounting Attachments +
client.accounting.attachments.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `AccountingAttachment` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.attachments.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**company_id:** `typing.Optional[str]` — If provided, will only return accounting attachments for this company. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.attachments.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates an `AccountingAttachment` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.accounting import AccountingAttachmentRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.attachments.create( + model=AccountingAttachmentRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `AccountingAttachmentRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
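+
+A hedged sketch that populates `AccountingAttachmentRequest` with `file_name` and `file_url`, both assumed from Merge's AccountingAttachment model, and runs the write asynchronously:
+
+```python
+from merge.client import Merge
+from merge.resources.accounting import AccountingAttachmentRequest
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+# Hedged sketch: attach a hosted PDF; `file_name` and `file_url`
+# are assumed AccountingAttachmentRequest fields.
+client.accounting.attachments.create(
+    model=AccountingAttachmentRequest(
+        file_name="invoice-1234.pdf",
+        file_url="https://example.com/files/invoice-1234.pdf",
+    ),
+    run_async=True,
+)
+```
+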
+ +
client.accounting.attachments.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `AccountingAttachment` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.attachments.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.attachments.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `AccountingAttachment` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.attachments.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting AuditTrail +
client.accounting.audit_trail.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Gets a list of audit trail events. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.audit_trail.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**end_date:** `typing.Optional[str]` — If included, will only include audit trail events that occurred before this time. + +
+
+ +
+
+ +**event_type:** `typing.Optional[str]` — If included, will only include events with the given event type. Possible values include: `CREATED_REMOTE_PRODUCTION_API_KEY`, `DELETED_REMOTE_PRODUCTION_API_KEY`, `CREATED_TEST_API_KEY`, `DELETED_TEST_API_KEY`, `REGENERATED_PRODUCTION_API_KEY`, `INVITED_USER`, `TWO_FACTOR_AUTH_ENABLED`, `TWO_FACTOR_AUTH_DISABLED`, `DELETED_LINKED_ACCOUNT`, `CREATED_DESTINATION`, `DELETED_DESTINATION`, `CHANGED_DESTINATION`, `CHANGED_SCOPES`, `CHANGED_PERSONAL_INFORMATION`, `CHANGED_ORGANIZATION_SETTINGS`, `ENABLED_INTEGRATION`, `DISABLED_INTEGRATION`, `ENABLED_CATEGORY`, `DISABLED_CATEGORY`, `CHANGED_PASSWORD`, `RESET_PASSWORD`, `ENABLED_REDACT_UNMAPPED_DATA_FOR_ORGANIZATION`, `ENABLED_REDACT_UNMAPPED_DATA_FOR_LINKED_ACCOUNT`, `DISABLED_REDACT_UNMAPPED_DATA_FOR_ORGANIZATION`, `DISABLED_REDACT_UNMAPPED_DATA_FOR_LINKED_ACCOUNT`, `CREATED_INTEGRATION_WIDE_FIELD_MAPPING`, `CREATED_LINKED_ACCOUNT_FIELD_MAPPING`, `CHANGED_INTEGRATION_WIDE_FIELD_MAPPING`, `CHANGED_LINKED_ACCOUNT_FIELD_MAPPING`, `DELETED_INTEGRATION_WIDE_FIELD_MAPPING`, `DELETED_LINKED_ACCOUNT_FIELD_MAPPING`, `FORCED_LINKED_ACCOUNT_RESYNC`, `MUTED_ISSUE`, `GENERATED_MAGIC_LINK`, `ENABLED_MERGE_WEBHOOK`, `DISABLED_MERGE_WEBHOOK`, `MERGE_WEBHOOK_TARGET_CHANGED` + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**start_date:** `typing.Optional[str]` — If included, will only include audit trail events that occurred after this time. + +
+
+ +
+
+ +**user_email:** `typing.Optional[str]` — If provided, this will return events associated with the specified user email. Please note that the email address reflects the user's email at the time of the event, and may not be their current email. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
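+
+A short sketch filtering the audit trail by event type and a date window; the ISO 8601 string format for `start_date`/`end_date` is an assumption, since the parameters above are typed as plain strings.
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+# Scope changes during January 2024, 50 events per page.
+# The timestamp format is an assumption (ISO 8601 with a Z suffix).
+events_page = client.accounting.audit_trail.list(
+    event_type="CHANGED_SCOPES",
+    start_date="2024-01-01T00:00:00Z",
+    end_date="2024-02-01T00:00:00Z",
+    page_size=50,
+)
+```
+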
+ +## Accounting AvailableActions +
client.accounting.available_actions.retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of models and actions available for an account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.available_actions.retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting BalanceSheets +
client.accounting.balance_sheets.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `BalanceSheet` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.balance_sheets.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**company_id:** `typing.Optional[str]` — If provided, will only return balance sheets for this company. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["company"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.balance_sheets.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `BalanceSheet` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.balance_sheets.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["company"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting CashFlowStatements +
client.accounting.cash_flow_statements.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `CashFlowStatement` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.cash_flow_statements.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**company_id:** `typing.Optional[str]` — If provided, will only return cash flow statements for this company. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["company"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.cash_flow_statements.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `CashFlowStatement` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.cash_flow_statements.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["company"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting CompanyInfo +
client.accounting.company_info.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `CompanyInfo` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.company_info.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[CompanyInfoListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.company_info.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `CompanyInfo` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.company_info.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[CompanyInfoRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting Contacts +
client.accounting.contacts.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Contact` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.contacts.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**company_id:** `typing.Optional[str]` — If provided, will only return contacts for this company. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[ContactsListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**is_customer:** `typing.Optional[str]` — If provided, will only return Contacts that are denoted as customers. + +
+
+ +
+
+ +**is_supplier:** `typing.Optional[str]` — If provided, will only return Contacts that are denoted as suppliers. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["status"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["status"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.contacts.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `Contact` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.accounting import ContactRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.contacts.create( + model=ContactRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `ContactRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.contacts.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Contact` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.contacts.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[ContactsRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["status"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["status"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
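Every method accepts `request_options` for per-call configuration. The sketch below assumes the generated `RequestOptions` typed dict is re-exported from `merge.core` and supports `timeout_in_seconds`, `max_retries`, and `additional_headers` keys; check `src/merge/core/request_options.py` for the exact shape.

```python
from merge.client import Merge
from merge.core import RequestOptions  # assumed re-export; see src/merge/core/__init__.py

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

# Per-request overrides: a tighter timeout, a couple of retries, and an extra header.
contact = client.accounting.contacts.retrieve(
    id="id",
    include_remote_data=True,
    request_options=RequestOptions(
        timeout_in_seconds=10,
        max_retries=2,
        additional_headers={"X-Request-Source": "reference-example"},
    ),
)
```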
+
+ +
client.accounting.contacts.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Contact` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.contacts.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting CreditNotes +
client.accounting.credit_notes.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `CreditNote` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.credit_notes.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**company_id:** `typing.Optional[str]` — If provided, will only return credit notes for this company. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[CreditNotesListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[CreditNotesListRequestRemoteFields]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[CreditNotesListRequestShowEnumOrigins]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**transaction_date_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**transaction_date_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
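Since `transaction_date_after` / `transaction_date_before` are plain `dt.datetime` filters, a month-long reporting window looks like the following sketch (the `remote_fields` / `show_enum_origins` arguments are left at their defaults here).

```python
import datetime as dt

from merge.client import Merge

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

# Credit notes whose transaction date falls inside June 2024.
page = client.accounting.credit_notes.list(
    transaction_date_after=dt.datetime(2024, 6, 1, tzinfo=dt.timezone.utc),
    transaction_date_before=dt.datetime(2024, 7, 1, tzinfo=dt.timezone.utc),
    include_remote_data=True,
    page_size=50,
)
```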
+
+ +
client.accounting.credit_notes.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `CreditNote` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.credit_notes.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[CreditNotesRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[CreditNotesRetrieveRequestRemoteFields]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[CreditNotesRetrieveRequestShowEnumOrigins]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting Scopes +
client.accounting.scopes.default_scopes_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get the default permissions for Merge Common Models and fields across all Linked Accounts of a given category. [Learn more](https://help.merge.dev/en/articles/8828211-common-model-and-field-scopes). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.scopes.default_scopes_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.scopes.linked_account_scopes_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all available permissions for Merge Common Models and fields for a single Linked Account. [Learn more](https://help.merge.dev/en/articles/8828211-common-model-and-field-scopes). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.scopes.linked_account_scopes_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.scopes.linked_account_scopes_create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Update permissions for any Common Model or field for a single Linked Account. Any Scopes not set in this POST request will inherit the default Scopes. [Learn more](https://help.merge.dev/en/articles/8828211-common-model-and-field-scopes) +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.accounting import ( + IndividualCommonModelScopeDeserializerRequest, + ModelPermissionDeserializerRequest, +) + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.scopes.linked_account_scopes_create( + common_models=[ + IndividualCommonModelScopeDeserializerRequest( + model_name="Employee", + model_permissions={ + "READ": ModelPermissionDeserializerRequest( + is_enabled=True, + ), + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ), + }, + ), + IndividualCommonModelScopeDeserializerRequest( + model_name="Benefit", + model_permissions={ + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ) + }, + ), + ], +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**common_models:** `typing.Sequence[IndividualCommonModelScopeDeserializerRequest]` — The common models you want to update the scopes for + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting DeleteAccount +
client.accounting.delete_account.delete() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Delete a linked account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.delete_account.delete() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting Expenses +
client.accounting.expenses.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Expense` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.expenses.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**company_id:** `typing.Optional[str]` — If provided, will only return expenses for this company. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[ExpensesListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**transaction_date_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**transaction_date_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
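A common pattern with the `modified_after` filter is incremental polling: remember the timestamp of your last run and only pull Expenses that Merge has synced since then. A minimal sketch:

```python
import datetime as dt

from merge.client import Merge

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

# In a real integration you would persist this timestamp between runs.
last_poll = dt.datetime.now(dt.timezone.utc) - dt.timedelta(hours=1)

recently_synced = client.accounting.expenses.list(
    modified_after=last_poll,
    include_remote_data=False,
    page_size=100,
)
```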
+
+ +
client.accounting.expenses.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates an `Expense` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.accounting import ExpenseRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.expenses.create( + model=ExpenseRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `ExpenseRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.expenses.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `Expense` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.expenses.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[ExpensesRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.expenses.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Expense` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.expenses.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting FieldMapping +
client.accounting.field_mapping.field_mappings_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all Field Mappings for this Linked Account. Field Mappings are mappings between third-party Remote Fields and user-defined Merge fields. [Learn more](https://docs.merge.dev/supplemental-data/field-mappings/overview/). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.field_mapping.field_mappings_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.field_mapping.field_mappings_create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create new Field Mappings that will be available after the next scheduled sync. This will cause the next sync for this Linked Account to sync **ALL** data from start. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.field_mapping.field_mappings_create( + target_field_name="example_target_field_name", + target_field_description="this is an example description of the target field", + remote_field_traversal_path=["example_remote_field"], + remote_method="GET", + remote_url_path="/example-url-path", + common_model_name="ExampleCommonModel", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**target_field_name:** `str` — The name of the target field you want this remote field to map to. + +
+
+ +
+
+ +**target_field_description:** `str` — The description of the target field you want this remote field to map to. + +
+
+ +
+
+ +**remote_field_traversal_path:** `typing.Sequence[typing.Any]` — The field traversal path of the remote field listed when you hit the GET /remote-fields endpoint. + +
+
+ +
+
+ +**remote_method:** `str` — The method of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**remote_url_path:** `str` — The path of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**common_model_name:** `str` — The name of the Common Model that the remote field corresponds to in a given category. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.field_mapping.field_mappings_destroy(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Deletes Field Mappings for a Linked Account. All data related to this Field Mapping will be deleted and these changes will be reflected after the next scheduled sync. This will cause the next sync for this Linked Account to sync **ALL** data from start. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.field_mapping.field_mappings_destroy( + field_mapping_id="field_mapping_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**field_mapping_id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.field_mapping.field_mappings_partial_update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create or update existing Field Mappings for a Linked Account. Changes will be reflected after the next scheduled sync. This will cause the next sync for this Linked Account to sync **ALL** data from start. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.field_mapping.field_mappings_partial_update( + field_mapping_id="field_mapping_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**field_mapping_id:** `str` + +
+
+ +
+
+ +**remote_field_traversal_path:** `typing.Optional[typing.Sequence[typing.Any]]` — The field traversal path of the remote field listed when you hit the GET /remote-fields endpoint. + +
+
+ +
+
+ +**remote_method:** `typing.Optional[str]` — The method of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**remote_url_path:** `typing.Optional[str]` — The path of the remote endpoint where the remote field is coming from. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.field_mapping.remote_fields_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all remote fields for a Linked Account. Remote fields are third-party fields that are accessible after initial sync if remote_data is enabled. You can use remote fields to override existing Merge fields or map a new Merge field. [Learn more](https://docs.merge.dev/supplemental-data/field-mappings/overview/). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.field_mapping.remote_fields_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**common_models:** `typing.Optional[str]` — A comma separated list of Common Model names. If included, will only return Remote Fields for those Common Models. + +
+
+ +
+
+ +**include_example_values:** `typing.Optional[str]` — If true, will include example values, where available, for remote fields in the third-party platform. These examples come from active data from your customers. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
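Both filters are plain strings, so narrowing the response to a couple of Common Models with example values attached looks like the sketch below (note that `include_example_values` is typed `typing.Optional[str]`, so the flag is passed as the string `"true"`).

```python
from merge.client import Merge

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

remote_fields = client.accounting.field_mapping.remote_fields_retrieve(
    common_models="Invoice,Contact",  # comma separated Common Model names
    include_example_values="true",    # string flag per the parameter type
)
```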
+
+ +
client.accounting.field_mapping.target_fields_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get all organization-wide Target Fields; this will not include any Linked Account-specific Target Fields. Organization-wide Target Fields are additional fields appended to the Merge Common Model for all Linked Accounts in a category. [Learn more](https://docs.merge.dev/supplemental-data/field-mappings/target-fields/). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.field_mapping.target_fields_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting GenerateKey +
client.accounting.generate_key.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Create a remote key. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.generate_key.create( + name="Remote Deployment Key 1", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**name:** `str` — The name of the remote key + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting IncomeStatements +
client.accounting.income_statements.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `IncomeStatement` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.income_statements.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**company_id:** `typing.Optional[str]` — If provided, will only return income statements for this company. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["company"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.income_statements.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `IncomeStatement` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.income_statements.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["company"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting Invoices +
client.accounting.invoices.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Invoice` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.invoices.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**company_id:** `typing.Optional[str]` — If provided, will only return invoices for this company. + +
+
+ +
+
+ +**contact_id:** `typing.Optional[str]` — If provided, will only return invoices for this contact. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[InvoicesListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**issue_date_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**issue_date_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["type"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["type"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**type:** `typing.Optional[InvoicesListRequestType]` + +If provided, will only return Invoices with this type + +- `ACCOUNTS_RECEIVABLE` - ACCOUNTS_RECEIVABLE +- `ACCOUNTS_PAYABLE` - ACCOUNTS_PAYABLE + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
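To restrict the list to payables or receivables, pass the `type` filter. The sketch below assumes the request enum `InvoicesListRequestType` is importable from `merge.resources.accounting` alongside the other generated types.

```python
from merge.client import Merge
from merge.resources.accounting import InvoicesListRequestType  # assumed import path

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

payables = client.accounting.invoices.list(
    type=InvoicesListRequestType.ACCOUNTS_PAYABLE,
    include_remote_data=True,
)
```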
+
+ +
client.accounting.invoices.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates an `Invoice` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.accounting import InvoiceRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.invoices.create( + model=InvoiceRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `InvoiceRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.invoices.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `Invoice` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.invoices.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[InvoicesRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["type"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["type"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.invoices.partial_update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Updates an `Invoice` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.accounting import InvoiceRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.invoices.partial_update( + id="id", + model=InvoiceRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**model:** `InvoiceRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.invoices.meta_patch_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Invoice` PATCHs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.invoices.meta_patch_retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.invoices.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Invoice` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.invoices.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting Issues +
client.accounting.issues.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Gets issues. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.issues.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**account_token:** `typing.Optional[str]` + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**end_date:** `typing.Optional[str]` — If included, will only include issues whose most recent action occurred before this time + +
+
+ +
+
+ +**end_user_organization_name:** `typing.Optional[str]` + +
+
+ +
+
+ +**first_incident_time_after:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose first incident time was after this datetime. + +
+
+ +
+
+ +**first_incident_time_before:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose first incident time was before this datetime. + +
+
+ +
+
+ +**include_muted:** `typing.Optional[str]` — If true, will include muted issues + +
+
+ +
+
+ +**integration_name:** `typing.Optional[str]` + +
+
+ +
+
+ +**last_incident_time_after:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose last incident time was after this datetime. + +
+
+ +
+
+ +**last_incident_time_before:** `typing.Optional[dt.datetime]` — If provided, will only return issues whose last incident time was before this datetime. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**start_date:** `typing.Optional[str]` — If included, will only include issues whose most recent action occurred after this time + +
+
+ +
+
+ +**status:** `typing.Optional[IssuesListRequestStatus]` + +Status of the issue. Options: ('ONGOING', 'RESOLVED') + +- `ONGOING` - ONGOING +- `RESOLVED` - RESOLVED + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
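For monitoring, the usual call filters down to unresolved issues. This sketch assumes `IssuesListRequestStatus` is importable from `merge.resources.accounting`.

```python
from merge.client import Merge
from merge.resources.accounting import IssuesListRequestStatus  # assumed import path

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

ongoing = client.accounting.issues.list(
    status=IssuesListRequestStatus.ONGOING,
    include_muted="false",  # string flag per the parameter type
    page_size=25,
)
```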
+
+ +
client.accounting.issues.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get a specific issue. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.issues.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting Items +
client.accounting.items.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Item` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.items.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**company_id:** `typing.Optional[str]` — If provided, will only return items for this company. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[ItemsListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["status"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["status"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.items.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `Item` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.items.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[ItemsRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["status"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["status"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting JournalEntries +
client.accounting.journal_entries.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `JournalEntry` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.journal_entries.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**company_id:** `typing.Optional[str]` — If provided, will only return journal entries for this company. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[JournalEntriesListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**transaction_date_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**transaction_date_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.journal_entries.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `JournalEntry` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.accounting import JournalEntryRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.journal_entries.create( + model=JournalEntryRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `JournalEntryRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.journal_entries.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `JournalEntry` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.journal_entries.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[JournalEntriesRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.journal_entries.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `JournalEntry` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.journal_entries.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting LinkToken +
client.accounting.link_token.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a link token to be used when linking a new end user. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.accounting import CategoriesEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.link_token.create( + end_user_email_address="example@gmail.com", + end_user_organization_name="Test Organization", + end_user_origin_id="12345", + categories=[CategoriesEnum.HRIS, CategoriesEnum.ATS], +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**end_user_email_address:** `str` — Your end user's email address. This is purely for identification purposes - setting this value will not cause any emails to be sent. + +
+
+ +
+
+ +**end_user_organization_name:** `str` — Your end user's organization. + +
+
+ +
+
+ +**end_user_origin_id:** `str` — This unique identifier typically represents the ID for your end user in your product's database. This value must be distinct from other Linked Accounts' unique identifiers. + +
+
+ +
+
+ +**categories:** `typing.Sequence[CategoriesEnum]` — The integration categories to show in Merge Link. + +
+
+ +
+
+ +**integration:** `typing.Optional[str]` — The slug of a specific pre-selected integration for this linking flow token. For examples of slugs, see https://docs.merge.dev/guides/merge-link/single-integration/. + +
+
+ +
+
+ +**link_expiry_mins:** `typing.Optional[int]` — An integer number of minutes, between 30 and 720 (or up to 10080 for a Magic Link URL), for how long this token is valid. Defaults to 30. + +
+
+ +
+
+ +**should_create_magic_link_url:** `typing.Optional[bool]` — Whether to generate a Magic Link URL. Defaults to false. For more information on Magic Link, see https://merge.dev/blog/integrations-fast-say-hello-to-magic-link. + +
+
+ +
+
+ +**common_models:** `typing.Optional[typing.Sequence[CommonModelScopesBodyRequest]]` — An array of objects to specify the models and fields that will be disabled for a given Linked Account. Each object uses model_id, enabled_actions, and disabled_fields to specify the model, method, and fields that are scoped for a given Linked Account. + +
+
+ +
+
+ +**category_common_model_scopes:** `typing.Optional[ + typing.Dict[ + str, + typing.Optional[ + typing.Sequence[IndividualCommonModelScopeDeserializerRequest] + ], + ] +]` — When creating a Link Token, you can set permissions for Common Models that will apply to the account that is going to be linked. Any model or field not specified in link token payload will default to existing settings. + +
+
+ +
+
+ +**language:** `typing.Optional[str]` — The language code for the language to localize Merge Link to. + +
+
+ +
+
+ +**integration_specific_config:** `typing.Optional[typing.Dict[str, typing.Any]]` — A JSON object containing integration-specific configuration options. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
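If you want Merge to host the linking flow for you, combine `should_create_magic_link_url` with a longer `link_expiry_mins`. A sketch for the accounting category, using only parameters documented above:

```python
from merge.client import Merge
from merge.resources.accounting import CategoriesEnum

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

link_token_response = client.accounting.link_token.create(
    end_user_email_address="finance@example.com",
    end_user_organization_name="Example Co",
    end_user_origin_id="customer-42",
    categories=[CategoriesEnum.ACCOUNTING],
    should_create_magic_link_url=True,
    link_expiry_mins=10080,  # Magic Link URLs may be valid for up to 7 days
    language="en",
)
```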
+
+
+ +## Accounting LinkedAccounts +
client.accounting.linked_accounts.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +List linked accounts for your organization. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.linked_accounts.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**category:** `typing.Optional[LinkedAccountsListRequestCategory]` + +Options: `accounting`, `ats`, `crm`, `filestorage`, `hris`, `mktg`, `ticketing` + +- `hris` - hris +- `ats` - ats +- `accounting` - accounting +- `ticketing` - ticketing +- `crm` - crm +- `mktg` - mktg +- `filestorage` - filestorage + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**end_user_email_address:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given email address. + +
+
+ +
+
+ +**end_user_organization_name:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given organization name. + +
+
+ +
+
+ +**end_user_origin_id:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given origin ID. + +
+
+ +
+
+ +**end_user_origin_ids:** `typing.Optional[str]` — Comma-separated list of EndUser origin IDs, making it possible to specify multiple EndUsers at once. + +
+
+ +
+
+ +**id:** `typing.Optional[str]` + +
+
+ +
+
+ +**ids:** `typing.Optional[str]` — Comma-separated list of LinkedAccount IDs, making it possible to specify multiple LinkedAccounts at once. + +
+
+ +
+
+ +**include_duplicates:** `typing.Optional[bool]` — If `true`, will include complete production duplicates of the account specified by the `id` query parameter in the response. `id` must be for a complete production linked account. + +
+
+ +
+
+ +**integration_name:** `typing.Optional[str]` — If provided, will only return linked accounts associated with the given integration name. + +
+
+ +
+
+ +**is_test_account:** `typing.Optional[str]` — If included, will only include test linked accounts. If not included, will only include non-test linked accounts. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**status:** `typing.Optional[str]` — Filter by status. Options: `COMPLETE`, `INCOMPLETE`, `RELINK_NEEDED` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
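To audit which accounts need attention, filter on `status` (a plain string here, per the parameter type) and exclude test accounts:

```python
from merge.client import Merge

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

relink_needed = client.accounting.linked_accounts.list(
    status="RELINK_NEEDED",
    is_test_account="false",  # only non-test linked accounts
    page_size=50,
)
```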
+
+
+ +## Accounting Passthrough +
client.accounting.passthrough.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Pull data from an endpoint not currently supported by Merge. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.accounting import DataPassthroughRequest, MethodEnum + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request:** `DataPassthroughRequest` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
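For write-style passthrough calls you typically also need a body and headers. The `data` and `headers` fields used below are assumptions about `DataPassthroughRequest` based on the shape of Merge's passthrough API; check `src/merge/resources/accounting/types/data_passthrough_request.py` for the exact model.

```python
import json

from merge.client import Merge
from merge.resources.accounting import DataPassthroughRequest, MethodEnum

client = Merge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)

# `data` and `headers` are assumed optional fields of DataPassthroughRequest.
response = client.accounting.passthrough.create(
    request=DataPassthroughRequest(
        method=MethodEnum.POST,
        path="/invoices",
        data=json.dumps({"reference": "INV-1001"}),
        headers={"Content-Type": "application/json"},
    ),
)
```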
+
+
+ +## Accounting Payments +
client.accounting.payments.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Payment` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.payments.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**account_id:** `typing.Optional[str]` — If provided, will only return payments for this account. + +
+
+ +
+
+ +**company_id:** `typing.Optional[str]` — If provided, will only return payments for this company. + +
+
+ +
+
+ +**contact_id:** `typing.Optional[str]` — If provided, will only return payments for this contact. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[PaymentsListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**transaction_date_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**transaction_date_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.payments.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `Payment` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.accounting import PaymentRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.payments.create( + model=PaymentRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `PaymentRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.payments.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Payment` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.payments.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[PaymentsRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.payments.partial_update(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Updates a `Payment` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.accounting import PatchedPaymentRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.payments.partial_update( + id="id", + model=PatchedPaymentRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**model:** `PatchedPaymentRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.payments.meta_patch_retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Payment` PATCHs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.payments.meta_patch_retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.payments.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `Payment` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.payments.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting PhoneNumbers +
client.accounting.phone_numbers.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns an `AccountingPhoneNumber` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.phone_numbers.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting PurchaseOrders +
client.accounting.purchase_orders.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `PurchaseOrder` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.purchase_orders.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**company_id:** `typing.Optional[str]` — If provided, will only return purchase orders for this company. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[PurchaseOrdersListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**issue_date_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**issue_date_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["status"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["status"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
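+
+As a sketch of how the `cursor` and `page_size` parameters above combine for pagination — this assumes the returned `PaginatedPurchaseOrderList` exposes `results` and a `next` cursor, the usual shape of Merge's paginated responses:
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+cursor = None
+while True:
+    # Fetch one page; page_size caps the number of results per page.
+    page = client.accounting.purchase_orders.list(
+        cursor=cursor,
+        page_size=50,
+    )
+    for purchase_order in page.results or []:
+        print(purchase_order.id)
+    # `next` carries the cursor for the following page, or None when done.
+    cursor = page.next
+    if cursor is None:
+        break
+```
+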
client.accounting.purchase_orders.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `PurchaseOrder` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge +from merge.resources.accounting import PurchaseOrderRequest + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.purchase_orders.create( + model=PurchaseOrderRequest(), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**model:** `PurchaseOrderRequest` + +
+
+ +
+
+ +**is_debug_mode:** `typing.Optional[bool]` — Whether to include debug fields (such as log file links) in the response. + +
+
+ +
+
+ +**run_async:** `typing.Optional[bool]` — Whether or not third-party updates should be run asynchronously. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
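+
+To illustrate the `is_debug_mode` and `run_async` flags above, a hedged sketch — the empty `PurchaseOrderRequest()` is a placeholder for the fields you actually want to write:
+
+```python
+from merge.client import Merge
+from merge.resources.accounting import PurchaseOrderRequest
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+# run_async=True queues the third-party write instead of waiting for it;
+# is_debug_mode=True asks for debug fields (such as log file links) back.
+response = client.accounting.purchase_orders.create(
+    model=PurchaseOrderRequest(),
+    is_debug_mode=True,
+    run_async=True,
+)
+print(response)
+```
+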
client.accounting.purchase_orders.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `PurchaseOrder` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.purchase_orders.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[PurchaseOrdersRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["status"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["status"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
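+
+A short sketch of the `show_enum_origins` parameter above — passing the literal `"status"` asks for the third party's original status value instead of Merge's normalized enum (the `id` is a placeholder):
+
+```python
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+# "status" is the only literal this endpoint accepts for show_enum_origins.
+purchase_order = client.accounting.purchase_orders.retrieve(
+    id="id",  # placeholder id
+    show_enum_origins="status",
+)
+print(purchase_order.status)
+```
+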
client.accounting.purchase_orders.meta_post_retrieve() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns metadata for `PurchaseOrder` POSTs. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.purchase_orders.meta_post_retrieve() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting RegenerateKey +
client.accounting.regenerate_key.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Exchange remote keys. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.regenerate_key.create( + name="Remote Deployment Key 1", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**name:** `str` — The name of the remote key. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting SyncStatus +
client.accounting.sync_status.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get syncing status. Possible values: `DISABLED`, `DONE`, `FAILED`, `PARTIALLY_SYNCED`, `PAUSED`, `SYNCING`. Learn more about sync status in our [Help Center](https://help.merge.dev/en/articles/8184193-merge-sync-statuses). +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.sync_status.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
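+
+Building on the status values listed above, a hedged polling sketch — it assumes each entry in `results` exposes `model_name` and `status`, which matches Merge's documented sync status shape:
+
+```python
+import time
+
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+# Poll until every model reports DONE. Depending on the SDK version the
+# status field may be an enum member or a plain string, so read it
+# defensively via getattr.
+while True:
+    page = client.accounting.sync_status.list(page_size=100)
+    pending = [
+        entry.model_name
+        for entry in (page.results or [])
+        if getattr(entry.status, "value", entry.status) != "DONE"
+    ]
+    if not pending:
+        break
+    print("Still syncing:", pending)
+    time.sleep(60)
+```
+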
+ +## Accounting ForceResync +
client.accounting.force_resync.sync_status_resync_create() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Force re-sync of all models. This is available for all organizations via the dashboard. Force re-sync is also available programmatically via API for monthly, quarterly, and highest sync frequency customers on the Launch, Professional, or Enterprise plans. Doing so will consume a sync credit for the relevant linked account. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.force_resync.sync_status_resync_create() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting TaxRates +
client.accounting.tax_rates.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `TaxRate` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.tax_rates.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**company_id:** `typing.Optional[str]` — If provided, will only return tax rates for this company. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["company"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
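+
+The `dt.datetime` filters above accept timezone-aware datetimes. A sketch that limits results to tax rates Merge synced during 2024 (the cutoff dates are arbitrary examples):
+
+```python
+from datetime import datetime, timezone
+
+from merge.client import Merge
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+# Arbitrary example window: only tax rates synced by Merge during 2024.
+tax_rates = client.accounting.tax_rates.list(
+    modified_after=datetime(2024, 1, 1, tzinfo=timezone.utc),
+    modified_before=datetime(2025, 1, 1, tzinfo=timezone.utc),
+    page_size=25,
+)
+for tax_rate in tax_rates.results or []:
+    print(tax_rate.id)
+```
+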
client.accounting.tax_rates.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `TaxRate` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.tax_rates.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["company"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting TrackingCategories +
client.accounting.tracking_categories.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `TrackingCategory` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.tracking_categories.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**company_id:** `typing.Optional[str]` — If provided, will only return tracking categories for this company. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["company"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["status"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["status"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.tracking_categories.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `TrackingCategory` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.tracking_categories.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[typing.Literal["company"]]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**remote_fields:** `typing.Optional[typing.Literal["status"]]` — Deprecated. Use show_enum_origins. + +
+
+ +
+
+ +**show_enum_origins:** `typing.Optional[typing.Literal["status"]]` — A comma separated list of enum field names for which you'd like the original values to be returned, instead of Merge's normalized enum values. [Learn more](https://help.merge.dev/en/articles/8950958-show_enum_origins-query-parameter) + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting Transactions +
client.accounting.transactions.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `Transaction` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.transactions.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**company_id:** `typing.Optional[str]` — If provided, will only return accounting transactions for this company. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[TransactionsListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**transaction_date_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects with a transaction date after this datetime. + +
+
+ +
+
+ +**transaction_date_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects with a transaction date before this datetime. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.transactions.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `Transaction` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.transactions.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[TransactionsRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting VendorCredits +
client.accounting.vendor_credits.list(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `VendorCredit` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.vendor_credits.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**company_id:** `typing.Optional[str]` — If provided, will only return vendor credits for this company. + +
+
+ +
+
+ +**created_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects created after this datetime. + +
+
+ +
+
+ +**created_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects created before this datetime. + +
+
+ +
+
+ +**cursor:** `typing.Optional[str]` — The pagination cursor value. + +
+
+ +
+
+ +**expand:** `typing.Optional[VendorCreditsListRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_deleted_data:** `typing.Optional[bool]` — Whether to include data that was marked as deleted by third party webhooks. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**modified_after:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge after this date time will be returned. + +
+
+ +
+
+ +**modified_before:** `typing.Optional[dt.datetime]` — If provided, only objects synced by Merge before this date time will be returned. + +
+
+ +
+
+ +**page_size:** `typing.Optional[int]` — Number of results to return per page. + +
+
+ +
+
+ +**remote_id:** `typing.Optional[str]` — The API provider's ID for the given object. + +
+
+ +
+
+ +**transaction_date_after:** `typing.Optional[dt.datetime]` — If provided, will only return objects with a transaction date after this datetime. + +
+
+ +
+
+ +**transaction_date_before:** `typing.Optional[dt.datetime]` — If provided, will only return objects with a transaction date before this datetime. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.vendor_credits.retrieve(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a `VendorCredit` object with the given `id`. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.vendor_credits.retrieve( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `str` + +
+
+ +
+
+ +**expand:** `typing.Optional[VendorCreditsRetrieveRequestExpand]` — Which relations should be returned in expanded form. Multiple relation names should be comma separated without spaces. + +
+
+ +
+
+ +**include_remote_data:** `typing.Optional[bool]` — Whether to include the original data Merge fetched from the third-party to produce these models. + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +## Accounting WebhookReceivers +
client.accounting.webhook_receivers.list() +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Returns a list of `WebhookReceiver` objects. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.webhook_receivers.list() + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.accounting.webhook_receivers.create(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Creates a `WebhookReceiver` object with the given values. +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from merge.client import Merge + +client = Merge( + account_token="YOUR_ACCOUNT_TOKEN", + api_key="YOUR_API_KEY", +) +client.accounting.webhook_receivers.create( + event="event", + is_active=True, +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**event:** `str` + +
+
+ +
+
+ +**is_active:** `bool` + +
+
+ +
+
+ +**key:** `typing.Optional[str]` + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
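+
+Every method above also accepts `request_options`. A hedged sketch of attaching extra headers or query parameters to a single call — the header name and query parameter shown are illustrative placeholders:
+
+```python
+from merge.client import Merge
+from merge.core import RequestOptions
+
+client = Merge(
+    account_token="YOUR_ACCOUNT_TOKEN",
+    api_key="YOUR_API_KEY",
+)
+
+# RequestOptions is a TypedDict; set only the keys you need.
+# The header and query parameter below are placeholders, not required values.
+receivers = client.accounting.webhook_receivers.list(
+    request_options=RequestOptions(
+        additional_headers={"X-Example-Trace-Id": "abc-123"},
+        additional_query_parameters={"example_param": "value"},
+    ),
+)
+print(receivers)
+```
+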
+ diff --git a/src/merge/client.py b/src/merge/client.py index 48012ff0..3aeec464 100644 --- a/src/merge/client.py +++ b/src/merge/client.py @@ -76,10 +76,10 @@ def __init__( else httpx.Client(timeout=_defaulted_timeout), timeout=_defaulted_timeout, ) + self.filestorage = FilestorageClient(client_wrapper=self._client_wrapper) self.ats = AtsClient(client_wrapper=self._client_wrapper) - self.crm = CrmClient(client_wrapper=self._client_wrapper) self.hris = HrisClient(client_wrapper=self._client_wrapper) - self.filestorage = FilestorageClient(client_wrapper=self._client_wrapper) + self.crm = CrmClient(client_wrapper=self._client_wrapper) self.ticketing = TicketingClient(client_wrapper=self._client_wrapper) self.accounting = AccountingClient(client_wrapper=self._client_wrapper) @@ -146,10 +146,10 @@ def __init__( else httpx.AsyncClient(timeout=_defaulted_timeout), timeout=_defaulted_timeout, ) + self.filestorage = AsyncFilestorageClient(client_wrapper=self._client_wrapper) self.ats = AsyncAtsClient(client_wrapper=self._client_wrapper) - self.crm = AsyncCrmClient(client_wrapper=self._client_wrapper) self.hris = AsyncHrisClient(client_wrapper=self._client_wrapper) - self.filestorage = AsyncFilestorageClient(client_wrapper=self._client_wrapper) + self.crm = AsyncCrmClient(client_wrapper=self._client_wrapper) self.ticketing = AsyncTicketingClient(client_wrapper=self._client_wrapper) self.accounting = AsyncAccountingClient(client_wrapper=self._client_wrapper) diff --git a/src/merge/core/__init__.py b/src/merge/core/__init__.py index 58ad52ad..1f453efe 100644 --- a/src/merge/core/__init__.py +++ b/src/merge/core/__init__.py @@ -6,7 +6,16 @@ from .file import File, convert_file_dict_to_httpx_tuples from .http_client import AsyncHttpClient, HttpClient from .jsonable_encoder import jsonable_encoder -from .pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .pydantic_utilities import ( + IS_PYDANTIC_V2, + UniversalBaseModel, + UniversalRootModel, + deep_union_pydantic_dicts, + parse_obj_as, + universal_field_validator, + universal_root_validator, + update_forward_refs, +) from .query_encoder import encode_query from .remove_none_from_dict import remove_none_from_dict from .request_options import RequestOptions @@ -18,13 +27,19 @@ "BaseClientWrapper", "File", "HttpClient", + "IS_PYDANTIC_V2", "RequestOptions", "SyncClientWrapper", + "UniversalBaseModel", + "UniversalRootModel", "convert_file_dict_to_httpx_tuples", "deep_union_pydantic_dicts", "encode_query", "jsonable_encoder", - "pydantic_v1", + "parse_obj_as", "remove_none_from_dict", "serialize_datetime", + "universal_field_validator", + "universal_root_validator", + "update_forward_refs", ] diff --git a/src/merge/core/client_wrapper.py b/src/merge/core/client_wrapper.py index cca1d58c..d7583e08 100644 --- a/src/merge/core/client_wrapper.py +++ b/src/merge/core/client_wrapper.py @@ -25,7 +25,7 @@ def get_headers(self) -> typing.Dict[str, str]: headers: typing.Dict[str, str] = { "X-Fern-Language": "Python", "X-Fern-SDK-Name": "MergePythonClient", - "X-Fern-SDK-Version": "1.0.12", + "X-Fern-SDK-Version": "1.1.0rc0", } if self._account_token is not None: headers["X-Account-Token"] = self._account_token diff --git a/src/merge/core/http_client.py b/src/merge/core/http_client.py index ed7cf5d3..9333d8a7 100644 --- a/src/merge/core/http_client.py +++ b/src/merge/core/http_client.py @@ -2,6 +2,7 @@ import asyncio import email.utils +import json import re import time import typing @@ -107,7 +108,7 @@ def maybe_filter_request_body( ) -> 
typing.Optional[typing.Any]: if data is None: return ( - jsonable_encoder(request_options.get("additional_body_parameters", {})) + jsonable_encoder(request_options.get("additional_body_parameters", {})) or {} if request_options is not None else None ) @@ -117,7 +118,7 @@ def maybe_filter_request_body( data_content = { **(jsonable_encoder(remove_omit_from_dict(data, omit))), # type: ignore **( - jsonable_encoder(request_options.get("additional_body_parameters", {})) + jsonable_encoder(request_options.get("additional_body_parameters", {})) or {} if request_options is not None else {} ), @@ -125,6 +126,25 @@ def maybe_filter_request_body( return data_content +# Abstracted out for testing purposes +def get_request_body( + *, + json: typing.Optional[typing.Any], + data: typing.Optional[typing.Any], + request_options: typing.Optional[RequestOptions], + omit: typing.Optional[typing.Any], +) -> typing.Tuple[typing.Optional[typing.Any], typing.Optional[typing.Any]]: + json_body = None + data_body = None + if data is not None: + data_body = maybe_filter_request_body(data, request_options, omit) + else: + # If both data and json are None, we send json data in the event extra properties are specified + json_body = maybe_filter_request_body(json, request_options, omit) + + return json_body, data_body + + class HttpClient: def __init__( self, @@ -168,6 +188,8 @@ def request( else self.base_timeout ) + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + response = self.httpx_client.request( method=method, url=urllib.parse.urljoin(f"{base_url}/", path), @@ -176,7 +198,7 @@ def request( { **self.base_headers, **(headers if headers is not None else {}), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), + **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}), } ) ), @@ -187,7 +209,7 @@ def request( { **(params if params is not None else {}), **( - request_options.get("additional_query_parameters", {}) + request_options.get("additional_query_parameters", {}) or {} if request_options is not None else {} ), @@ -197,8 +219,8 @@ def request( ) ) ), - json=maybe_filter_request_body(json, request_options, omit), - data=maybe_filter_request_body(data, request_options, omit), + json=json_body, + data=data_body, content=content, files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None, timeout=timeout, @@ -248,6 +270,8 @@ def stream( else self.base_timeout ) + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + with self.httpx_client.stream( method=method, url=urllib.parse.urljoin(f"{base_url}/", path), @@ -277,8 +301,8 @@ def stream( ) ) ), - json=maybe_filter_request_body(json, request_options, omit), - data=maybe_filter_request_body(data, request_options, omit), + json=json_body, + data=data_body, content=content, files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None, timeout=timeout, @@ -329,6 +353,8 @@ async def request( else self.base_timeout ) + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + # Add the input to each of these and do None-safety checks response = await self.httpx_client.request( method=method, @@ -338,7 +364,7 @@ async def request( { **self.base_headers, **(headers if headers is not None else {}), - **(request_options.get("additional_headers", {}) if 
request_options is not None else {}), + **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}), } ) ), @@ -349,7 +375,7 @@ async def request( { **(params if params is not None else {}), **( - request_options.get("additional_query_parameters", {}) + request_options.get("additional_query_parameters", {}) or {} if request_options is not None else {} ), @@ -359,8 +385,8 @@ async def request( ) ) ), - json=maybe_filter_request_body(json, request_options, omit), - data=maybe_filter_request_body(data, request_options, omit), + json=json_body, + data=data_body, content=content, files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None, timeout=timeout, @@ -409,6 +435,8 @@ async def stream( else self.base_timeout ) + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + async with self.httpx_client.stream( method=method, url=urllib.parse.urljoin(f"{base_url}/", path), @@ -438,8 +466,8 @@ async def stream( ) ) ), - json=maybe_filter_request_body(json, request_options, omit), - data=maybe_filter_request_body(data, request_options, omit), + json=json_body, + data=data_body, content=content, files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None, timeout=timeout, diff --git a/src/merge/core/jsonable_encoder.py b/src/merge/core/jsonable_encoder.py index 7f482732..9251cd58 100644 --- a/src/merge/core/jsonable_encoder.py +++ b/src/merge/core/jsonable_encoder.py @@ -8,33 +8,23 @@ https://github.com/tiangolo/fastapi/blob/master/fastapi/encoders.py """ +import base64 import dataclasses import datetime as dt -from collections import defaultdict from enum import Enum from pathlib import PurePath from types import GeneratorType -from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Set, Union + +import pydantic from .datetime_utils import serialize_datetime -from .pydantic_utilities import pydantic_v1 +from .pydantic_utilities import IS_PYDANTIC_V2, encode_by_type, to_jsonable_with_fallback SetIntStr = Set[Union[int, str]] DictIntStrAny = Dict[Union[int, str], Any] -def generate_encoders_by_class_tuples( - type_encoder_map: Dict[Any, Callable[[Any], Any]] -) -> Dict[Callable[[Any], Any], Tuple[Any, ...]]: - encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict(tuple) - for type_, encoder in type_encoder_map.items(): - encoders_by_class_tuples[encoder] += (type_,) - return encoders_by_class_tuples - - -encoders_by_class_tuples = generate_encoders_by_class_tuples(pydantic_v1.json.ENCODERS_BY_TYPE) - - def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None) -> Any: custom_encoder = custom_encoder or {} if custom_encoder: @@ -44,17 +34,24 @@ def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any] for encoder_type, encoder_instance in custom_encoder.items(): if isinstance(obj, encoder_type): return encoder_instance(obj) - if isinstance(obj, pydantic_v1.BaseModel): - encoder = getattr(obj.__config__, "json_encoders", {}) + if isinstance(obj, pydantic.BaseModel): + if IS_PYDANTIC_V2: + encoder = getattr(obj.model_config, "json_encoders", {}) # type: ignore # Pydantic v2 + else: + encoder = getattr(obj.__config__, "json_encoders", {}) # type: ignore # Pydantic v1 if custom_encoder: encoder.update(custom_encoder) obj_dict = obj.dict(by_alias=True) if "__root__" in 
obj_dict: obj_dict = obj_dict["__root__"] + if "root" in obj_dict: + obj_dict = obj_dict["root"] return jsonable_encoder(obj_dict, custom_encoder=encoder) if dataclasses.is_dataclass(obj): obj_dict = dataclasses.asdict(obj) return jsonable_encoder(obj_dict, custom_encoder=custom_encoder) + if isinstance(obj, bytes): + return base64.b64encode(obj).decode("utf-8") if isinstance(obj, Enum): return obj.value if isinstance(obj, PurePath): @@ -80,20 +77,21 @@ def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any] encoded_list.append(jsonable_encoder(item, custom_encoder=custom_encoder)) return encoded_list - if type(obj) in pydantic_v1.json.ENCODERS_BY_TYPE: - return pydantic_v1.json.ENCODERS_BY_TYPE[type(obj)](obj) - for encoder, classes_tuple in encoders_by_class_tuples.items(): - if isinstance(obj, classes_tuple): - return encoder(obj) + def fallback_serializer(o: Any) -> Any: + attempt_encode = encode_by_type(o) + if attempt_encode is not None: + return attempt_encode - try: - data = dict(obj) - except Exception as e: - errors: List[Exception] = [] - errors.append(e) try: - data = vars(obj) + data = dict(o) except Exception as e: + errors: List[Exception] = [] errors.append(e) - raise ValueError(errors) from e - return jsonable_encoder(data, custom_encoder=custom_encoder) + try: + data = vars(o) + except Exception as e: + errors.append(e) + raise ValueError(errors) from e + return jsonable_encoder(data, custom_encoder=custom_encoder) + + return to_jsonable_with_fallback(obj, fallback_serializer) diff --git a/src/merge/core/pydantic_utilities.py b/src/merge/core/pydantic_utilities.py index a72c1a52..0f24b0ea 100644 --- a/src/merge/core/pydantic_utilities.py +++ b/src/merge/core/pydantic_utilities.py @@ -1,15 +1,53 @@ # This file was auto-generated by Fern from our API Definition. 
+# nopycln: file +import datetime as dt import typing +from collections import defaultdict +from functools import wraps import pydantic +from .datetime_utils import serialize_datetime + IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.") if IS_PYDANTIC_V2: - import pydantic.v1 as pydantic_v1 # type: ignore # nopycln: import + # isort will try to reformat the comments on these imports, which breaks mypy + # isort: off + from pydantic.v1.datetime_parse import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 + parse_date as parse_date, + ) + from pydantic.v1.datetime_parse import ( # pyright: ignore[reportMissingImports] # Pydantic v2 + parse_datetime as parse_datetime, + ) + from pydantic.v1.json import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 + ENCODERS_BY_TYPE as encoders_by_type, + ) + from pydantic.v1.typing import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 + get_args as get_args, + ) + from pydantic.v1.typing import get_origin as get_origin # pyright: ignore[reportMissingImports] # Pydantic v2 + from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2 + is_literal_type as is_literal_type, + ) + from pydantic.v1.typing import is_union as is_union # pyright: ignore[reportMissingImports] # Pydantic v2 + from pydantic.v1.fields import ModelField as ModelField # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2 else: - import pydantic as pydantic_v1 # type: ignore # nopycln: import + from pydantic.datetime_parse import parse_date as parse_date # type: ignore # Pydantic v1 + from pydantic.datetime_parse import parse_datetime as parse_datetime # type: ignore # Pydantic v1 + from pydantic.fields import ModelField as ModelField # type: ignore # Pydantic v1 + from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore # Pydantic v1 + from pydantic.typing import get_args as get_args # type: ignore # Pydantic v1 + from pydantic.typing import get_origin as get_origin # type: ignore # Pydantic v1 + from pydantic.typing import is_literal_type as is_literal_type # type: ignore # Pydantic v1 + from pydantic.typing import is_union as is_union # type: ignore # Pydantic v1 + + # isort: on + + +T = typing.TypeVar("T") +Model = typing.TypeVar("Model", bound=pydantic.BaseModel) def deep_union_pydantic_dicts( @@ -25,4 +63,117 @@ def deep_union_pydantic_dicts( return destination -__all__ = ["pydantic_v1"] +def parse_obj_as(type_: typing.Type[T], object_: typing.Any) -> T: + if IS_PYDANTIC_V2: + adapter = pydantic.TypeAdapter(type_) # type: ignore # Pydantic v2 + return adapter.validate_python(object_) + else: + return pydantic.parse_obj_as(type_, object_) + + +def to_jsonable_with_fallback( + obj: typing.Any, fallback_serializer: typing.Callable[[typing.Any], typing.Any] +) -> typing.Any: + if IS_PYDANTIC_V2: + from pydantic_core import to_jsonable_python + + return to_jsonable_python(obj, fallback=fallback_serializer) + else: + return fallback_serializer(obj) + + +class UniversalBaseModel(pydantic.BaseModel): + class Config: + populate_by_name = True + smart_union = True + allow_population_by_field_name = True + json_encoders = {dt.datetime: serialize_datetime} + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + if IS_PYDANTIC_V2: + return super().model_dump_json(**kwargs_with_defaults) # type: ignore # Pydantic v2 + else: + return super().json(**kwargs_with_defaults) + + def 
dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} + + if IS_PYDANTIC_V2: + return deep_union_pydantic_dicts( + super().model_dump(**kwargs_with_defaults_exclude_unset), # type: ignore # Pydantic v2 + super().model_dump(**kwargs_with_defaults_exclude_none), # type: ignore # Pydantic v2 + ) + else: + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) + ) + + +UniversalRootModel: typing.Type[typing.Any] +if IS_PYDANTIC_V2: + + class V2RootModel(UniversalBaseModel, pydantic.RootModel): # type: ignore # Pydantic v2 + pass + + UniversalRootModel = V2RootModel +else: + UniversalRootModel = UniversalBaseModel + + +def encode_by_type(o: typing.Any) -> typing.Any: + encoders_by_class_tuples: typing.Dict[ + typing.Callable[[typing.Any], typing.Any], typing.Tuple[typing.Any, ...] + ] = defaultdict(tuple) + for type_, encoder in encoders_by_type.items(): + encoders_by_class_tuples[encoder] += (type_,) + + if type(o) in encoders_by_type: + return encoders_by_type[type(o)](o) + for encoder, classes_tuple in encoders_by_class_tuples.items(): + if isinstance(o, classes_tuple): + return encoder(o) + + +def update_forward_refs(model: typing.Type["Model"], **localns: typing.Any) -> None: + if IS_PYDANTIC_V2: + model.model_rebuild(force=True) # type: ignore # Pydantic v2 + else: + model.update_forward_refs(**localns) + + +# Mirrors Pydantic's internal typing +AnyCallable = typing.Callable[..., typing.Any] + + +def universal_root_validator(pre: bool = False) -> typing.Callable[[AnyCallable], AnyCallable]: + def decorator(func: AnyCallable) -> AnyCallable: + @wraps(func) + def validate(*args: typing.Any, **kwargs: typing.Any) -> AnyCallable: + if IS_PYDANTIC_V2: + wrapped_func = pydantic.model_validator("before" if pre else "after")(func) # type: ignore # Pydantic v2 + else: + wrapped_func = pydantic.root_validator(pre=pre)(func) # type: ignore # Pydantic v1 + + return wrapped_func(*args, **kwargs) + + return validate + + return decorator + + +def universal_field_validator(field_name: str, pre: bool = False) -> typing.Callable[[AnyCallable], AnyCallable]: + def decorator(func: AnyCallable) -> AnyCallable: + @wraps(func) + def validate(*args: typing.Any, **kwargs: typing.Any) -> AnyCallable: + if IS_PYDANTIC_V2: + wrapped_func = pydantic.field_validator(field_name, mode="before" if pre else "after")(func) # type: ignore # Pydantic v2 + else: + wrapped_func = pydantic.validator(field_name, pre=pre)(func) + + return wrapped_func(*args, **kwargs) + + return validate + + return decorator diff --git a/src/merge/core/query_encoder.py b/src/merge/core/query_encoder.py index 1f5f766b..24076d72 100644 --- a/src/merge/core/query_encoder.py +++ b/src/merge/core/query_encoder.py @@ -3,7 +3,7 @@ from collections import ChainMap from typing import Any, Dict, Optional -from .pydantic_utilities import pydantic_v1 +import pydantic # Flattens dicts to be of the form {"key[subkey][subkey2]": value} where value is not a dict @@ -19,8 +19,8 @@ def traverse_query_dict(dict_flat: Dict[str, Any], key_prefix: Optional[str] = N def single_query_encoder(query_key: str, query_value: Any) -> Dict[str, Any]: - if isinstance(query_value, pydantic_v1.BaseModel) or isinstance(query_value, dict): - if isinstance(query_value, 
pydantic_v1.BaseModel): + if isinstance(query_value, pydantic.BaseModel) or isinstance(query_value, dict): + if isinstance(query_value, pydantic.BaseModel): obj_dict = query_value.dict(by_alias=True) else: obj_dict = query_value diff --git a/src/merge/core/request_options.py b/src/merge/core/request_options.py index cd6f27a7..d0bf0dbc 100644 --- a/src/merge/core/request_options.py +++ b/src/merge/core/request_options.py @@ -5,10 +5,10 @@ try: from typing import NotRequired # type: ignore except ImportError: - from typing_extensions import NotRequired # type: ignore + from typing_extensions import NotRequired -class RequestOptions(typing.TypedDict): +class RequestOptions(typing.TypedDict, total=False): """ Additional options for request-specific configuration when calling APIs via the SDK. This is used primarily as an optional final parameter for service functions. diff --git a/src/merge/resources/accounting/resources/account_details/client.py b/src/merge/resources/accounting/resources/account_details/client.py index cff69698..51483314 100644 --- a/src/merge/resources/accounting/resources/account_details/client.py +++ b/src/merge/resources/accounting/resources/account_details/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.account_details import AccountDetails @@ -41,9 +41,9 @@ def retrieve(self, *, request_options: typing.Optional[RequestOptions] = None) - _response = self._client_wrapper.httpx_client.request( "accounting/v1/account-details", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountDetails, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountDetails, parse_obj_as(type_=AccountDetails, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -70,20 +70,28 @@ async def retrieve(self, *, request_options: typing.Optional[RequestOptions] = N Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.account_details.retrieve() + + + async def main() -> None: + await client.accounting.account_details.retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/account-details", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountDetails, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountDetails, parse_obj_as(type_=AccountDetails, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/account_token/client.py b/src/merge/resources/accounting/resources/account_token/client.py index 637a18ab..a4ae5634 100644 --- a/src/merge/resources/accounting/resources/account_token/client.py +++ b/src/merge/resources/accounting/resources/account_token/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError 
from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.account_token import AccountToken @@ -48,9 +48,9 @@ def retrieve(self, public_token: str, *, request_options: typing.Optional[Reques method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountToken, parse_obj_as(type_=AccountToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -81,24 +81,32 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.account_token.retrieve( - public_token="public_token", - ) + + + async def main() -> None: + await client.accounting.account_token.retrieve( + public_token="public_token", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/account-token/{jsonable_encoder(public_token)}", method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountToken, parse_obj_as(type_=AccountToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/accounting_periods/client.py b/src/merge/resources/accounting/resources/accounting_periods/client.py index b355029a..4e2229aa 100644 --- a/src/merge/resources/accounting/resources/accounting_periods/client.py +++ b/src/merge/resources/accounting/resources/accounting_periods/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.accounting_period import AccountingPeriod from ...types.paginated_accounting_period_list import PaginatedAccountingPeriodList @@ -71,9 +71,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountingPeriodList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountingPeriodList, parse_obj_as(type_=PaginatedAccountingPeriodList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -122,9 +122,9 @@ def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountingPeriod, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + 
return typing.cast(AccountingPeriod, parse_obj_as(type_=AccountingPeriod, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -171,13 +171,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.accounting_periods.list() + + + async def main() -> None: + await client.accounting.accounting_periods.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/accounting-periods", @@ -190,9 +198,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountingPeriodList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountingPeriodList, parse_obj_as(type_=PaginatedAccountingPeriodList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -225,15 +233,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.accounting_periods.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.accounting_periods.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/accounting-periods/{jsonable_encoder(id)}", @@ -241,9 +257,9 @@ async def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountingPeriod, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountingPeriod, parse_obj_as(type_=AccountingPeriod, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/accounts/client.py b/src/merge/resources/accounting/resources/accounts/client.py index dac8ff4e..84fbe3c9 100644 --- a/src/merge/resources/accounting/resources/accounts/client.py +++ b/src/merge/resources/accounting/resources/accounts/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.account import Account from ...types.account_request import AccountRequest @@ -128,9 +128,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountList, parse_obj_as(type_=PaginatedAccountList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) 
@@ -186,9 +186,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountResponse, parse_obj_as(type_=AccountResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -254,9 +254,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Account, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Account, parse_obj_as(type_=Account, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -289,9 +289,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "accounting/v1/accounts/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -374,13 +374,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.accounts.list() + + + async def main() -> None: + await client.accounting.accounts.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/accounts", @@ -402,9 +410,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountList, parse_obj_as(type_=PaginatedAccountList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -441,6 +449,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.accounting import AccountRequest @@ -448,9 +458,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.accounts.create( - model=AccountRequest(), - ) + + + async def main() -> None: + await client.accounting.accounts.create( + model=AccountRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/accounts", @@ -460,9 +476,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountResponse, parse_obj_as(type_=AccountResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise 
ApiError(status_code=_response.status_code, body=_response.text) @@ -507,15 +523,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.accounts.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.accounts.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/accounts/{jsonable_encoder(id)}", @@ -528,9 +552,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Account, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Account, parse_obj_as(type_=Account, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -552,20 +576,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.accounts.meta_post_retrieve() + + + async def main() -> None: + await client.accounting.accounts.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/accounts/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/addresses/client.py b/src/merge/resources/accounting/resources/addresses/client.py index 51276ec4..8b2b4755 100644 --- a/src/merge/resources/accounting/resources/addresses/client.py +++ b/src/merge/resources/accounting/resources/addresses/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.address import Address @@ -70,9 +70,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Address, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Address, parse_obj_as(type_=Address, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -118,15 +118,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.addresses.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.addresses.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response 
= await self._client_wrapper.httpx_client.request( f"accounting/v1/addresses/{jsonable_encoder(id)}", @@ -138,9 +146,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Address, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Address, parse_obj_as(type_=Address, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/async_passthrough/client.py b/src/merge/resources/accounting/resources/async_passthrough/client.py index 25181d37..b4c1bfaf 100644 --- a/src/merge/resources/accounting/resources/async_passthrough/client.py +++ b/src/merge/resources/accounting/resources/async_passthrough/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.async_passthrough_reciept import AsyncPassthroughReciept from ...types.data_passthrough_request import DataPassthroughRequest @@ -57,9 +57,9 @@ def create( _response = self._client_wrapper.httpx_client.request( "accounting/v1/async-passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AsyncPassthroughReciept, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AsyncPassthroughReciept, parse_obj_as(type_=AsyncPassthroughReciept, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -100,9 +100,9 @@ def retrieve( method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -133,6 +133,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.accounting import DataPassthroughRequest, MethodEnum @@ -140,19 +142,25 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.async_passthrough.create( - request=DataPassthroughRequest( - method=MethodEnum.GET, - path="/scooters", - ), - ) + + + async def main() -> None: + await client.accounting.async_passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/async-passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AsyncPassthroughReciept, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AsyncPassthroughReciept, 
parse_obj_as(type_=AsyncPassthroughReciept, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -178,24 +186,32 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.async_passthrough.retrieve( - async_passthrough_receipt_id="async_passthrough_receipt_id", - ) + + + async def main() -> None: + await client.accounting.async_passthrough.retrieve( + async_passthrough_receipt_id="async_passthrough_receipt_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/async-passthrough/{jsonable_encoder(async_passthrough_receipt_id)}", method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/attachments/client.py b/src/merge/resources/accounting/resources/attachments/client.py index 2b0c395e..bced09c5 100644 --- a/src/merge/resources/accounting/resources/attachments/client.py +++ b/src/merge/resources/accounting/resources/attachments/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.accounting_attachment import AccountingAttachment from ...types.accounting_attachment_request import AccountingAttachmentRequest @@ -109,9 +109,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountingAttachmentList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountingAttachmentList, parse_obj_as(type_=PaginatedAccountingAttachmentList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -167,9 +167,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountingAttachmentResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountingAttachmentResponse, parse_obj_as(type_=AccountingAttachmentResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -218,9 +218,9 @@ def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountingAttachment, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return 
typing.cast(AccountingAttachment, parse_obj_as(type_=AccountingAttachment, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -253,9 +253,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "accounting/v1/attachments/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -326,13 +326,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.attachments.list() + + + async def main() -> None: + await client.accounting.attachments.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/attachments", @@ -351,9 +359,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountingAttachmentList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountingAttachmentList, parse_obj_as(type_=PaginatedAccountingAttachmentList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -390,6 +398,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.accounting import AccountingAttachmentRequest @@ -397,9 +407,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.attachments.create( - model=AccountingAttachmentRequest(), - ) + + + async def main() -> None: + await client.accounting.attachments.create( + model=AccountingAttachmentRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/attachments", @@ -409,9 +425,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountingAttachmentResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountingAttachmentResponse, parse_obj_as(type_=AccountingAttachmentResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -444,15 +460,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.attachments.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.attachments.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/attachments/{jsonable_encoder(id)}", @@ -460,9 +484,9 @@ async def 
retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountingAttachment, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountingAttachment, parse_obj_as(type_=AccountingAttachment, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -484,20 +508,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.attachments.meta_post_retrieve() + + + async def main() -> None: + await client.accounting.attachments.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/attachments/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/audit_trail/client.py b/src/merge/resources/accounting/resources/audit_trail/client.py index 691f459d..39a140a6 100644 --- a/src/merge/resources/accounting/resources/audit_trail/client.py +++ b/src/merge/resources/accounting/resources/audit_trail/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_audit_log_event_list import PaginatedAuditLogEventList @@ -79,9 +79,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAuditLogEventList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAuditLogEventList, parse_obj_as(type_=PaginatedAuditLogEventList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -136,13 +136,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.audit_trail.list() + + + async def main() -> None: + await client.accounting.audit_trail.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/audit-trail", @@ -157,9 +165,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAuditLogEventList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAuditLogEventList, parse_obj_as(type_=PaginatedAuditLogEventList, object_=_response.json())) # type: ignore 
_response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/available_actions/client.py b/src/merge/resources/accounting/resources/available_actions/client.py index 7de301d2..f9e0bde8 100644 --- a/src/merge/resources/accounting/resources/available_actions/client.py +++ b/src/merge/resources/accounting/resources/available_actions/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.available_actions import AvailableActions @@ -41,9 +41,9 @@ def retrieve(self, *, request_options: typing.Optional[RequestOptions] = None) - _response = self._client_wrapper.httpx_client.request( "accounting/v1/available-actions", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AvailableActions, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AvailableActions, parse_obj_as(type_=AvailableActions, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -70,20 +70,28 @@ async def retrieve(self, *, request_options: typing.Optional[RequestOptions] = N Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.available_actions.retrieve() + + + async def main() -> None: + await client.accounting.available_actions.retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/available-actions", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AvailableActions, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AvailableActions, parse_obj_as(type_=AvailableActions, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/balance_sheets/client.py b/src/merge/resources/accounting/resources/balance_sheets/client.py index c9d26d14..15291ade 100644 --- a/src/merge/resources/accounting/resources/balance_sheets/client.py +++ b/src/merge/resources/accounting/resources/balance_sheets/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.balance_sheet import BalanceSheet from ...types.paginated_balance_sheet_list import PaginatedBalanceSheetList @@ -108,9 +108,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedBalanceSheetList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return 
typing.cast(PaginatedBalanceSheetList, parse_obj_as(type_=PaginatedBalanceSheetList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -163,9 +163,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BalanceSheet, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(BalanceSheet, parse_obj_as(type_=BalanceSheet, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -240,13 +240,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.balance_sheets.list() + + + async def main() -> None: + await client.accounting.balance_sheets.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/balance-sheets", @@ -266,9 +274,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedBalanceSheetList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedBalanceSheetList, parse_obj_as(type_=PaginatedBalanceSheetList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -305,15 +313,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.balance_sheets.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.balance_sheets.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/balance-sheets/{jsonable_encoder(id)}", @@ -321,9 +337,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BalanceSheet, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(BalanceSheet, parse_obj_as(type_=BalanceSheet, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/cash_flow_statements/client.py b/src/merge/resources/accounting/resources/cash_flow_statements/client.py index 71710bca..52bffafc 100644 --- a/src/merge/resources/accounting/resources/cash_flow_statements/client.py +++ b/src/merge/resources/accounting/resources/cash_flow_statements/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from 
...types.cash_flow_statement import CashFlowStatement from ...types.paginated_cash_flow_statement_list import PaginatedCashFlowStatementList @@ -108,9 +108,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedCashFlowStatementList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedCashFlowStatementList, parse_obj_as(type_=PaginatedCashFlowStatementList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -163,9 +163,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CashFlowStatement, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CashFlowStatement, parse_obj_as(type_=CashFlowStatement, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -240,13 +240,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.cash_flow_statements.list() + + + async def main() -> None: + await client.accounting.cash_flow_statements.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/cash-flow-statements", @@ -266,9 +274,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedCashFlowStatementList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedCashFlowStatementList, parse_obj_as(type_=PaginatedCashFlowStatementList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -305,15 +313,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.cash_flow_statements.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.cash_flow_statements.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/cash-flow-statements/{jsonable_encoder(id)}", @@ -321,9 +337,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CashFlowStatement, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CashFlowStatement, parse_obj_as(type_=CashFlowStatement, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/company_info/client.py b/src/merge/resources/accounting/resources/company_info/client.py index 10b1ac0a..b47fc540 100644 --- 
a/src/merge/resources/accounting/resources/company_info/client.py +++ b/src/merge/resources/accounting/resources/company_info/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.company_info import CompanyInfo from ...types.paginated_company_info_list import PaginatedCompanyInfoList @@ -105,9 +105,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedCompanyInfoList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedCompanyInfoList, parse_obj_as(type_=PaginatedCompanyInfoList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -160,9 +160,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CompanyInfo, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CompanyInfo, parse_obj_as(type_=CompanyInfo, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -233,13 +233,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.company_info.list() + + + async def main() -> None: + await client.accounting.company_info.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/company-info", @@ -258,9 +266,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedCompanyInfoList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedCompanyInfoList, parse_obj_as(type_=PaginatedCompanyInfoList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -297,15 +305,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.company_info.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.company_info.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/company-info/{jsonable_encoder(id)}", @@ -313,9 +329,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CompanyInfo, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CompanyInfo, parse_obj_as(type_=CompanyInfo, object_=_response.json())) # type: 
ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/contacts/client.py b/src/merge/resources/accounting/resources/contacts/client.py index 0453b111..ed8b0b7e 100644 --- a/src/merge/resources/accounting/resources/contacts/client.py +++ b/src/merge/resources/accounting/resources/contacts/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.contact import Contact from ...types.contact_request import ContactRequest @@ -136,9 +136,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedContactList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedContactList, parse_obj_as(type_=PaginatedContactList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -194,9 +194,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ContactResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ContactResponse, parse_obj_as(type_=ContactResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -262,9 +262,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Contact, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Contact, parse_obj_as(type_=Contact, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -297,9 +297,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "accounting/v1/contacts/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -390,13 +390,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.contacts.list() + + + async def main() -> None: + await client.accounting.contacts.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/contacts", @@ -420,9 +428,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(PaginatedContactList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedContactList, parse_obj_as(type_=PaginatedContactList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -459,6 +467,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.accounting import ContactRequest @@ -466,9 +476,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.contacts.create( - model=ContactRequest(), - ) + + + async def main() -> None: + await client.accounting.contacts.create( + model=ContactRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/contacts", @@ -478,9 +494,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ContactResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ContactResponse, parse_obj_as(type_=ContactResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -525,15 +541,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.contacts.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.contacts.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/contacts/{jsonable_encoder(id)}", @@ -546,9 +570,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Contact, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Contact, parse_obj_as(type_=Contact, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -570,20 +594,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.contacts.meta_post_retrieve() + + + async def main() -> None: + await client.accounting.contacts.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/contacts/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/credit_notes/client.py b/src/merge/resources/accounting/resources/credit_notes/client.py index 
554844ba..f12fc857 100644 --- a/src/merge/resources/accounting/resources/credit_notes/client.py +++ b/src/merge/resources/accounting/resources/credit_notes/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.credit_note import CreditNote from ...types.paginated_credit_note_list import PaginatedCreditNoteList @@ -138,9 +138,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedCreditNoteList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedCreditNoteList, parse_obj_as(type_=PaginatedCreditNoteList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -206,9 +206,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CreditNote, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CreditNote, parse_obj_as(type_=CreditNote, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -299,13 +299,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.credit_notes.list() + + + async def main() -> None: + await client.accounting.credit_notes.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/credit-notes", @@ -333,9 +341,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedCreditNoteList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedCreditNoteList, parse_obj_as(type_=PaginatedCreditNoteList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -380,15 +388,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.credit_notes.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.credit_notes.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/credit-notes/{jsonable_encoder(id)}", @@ -401,9 +417,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CreditNote, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CreditNote, parse_obj_as(type_=CreditNote, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, 
body=_response.text) diff --git a/src/merge/resources/accounting/resources/delete_account/client.py b/src/merge/resources/accounting/resources/delete_account/client.py index 389b267a..7b5ec9c6 100644 --- a/src/merge/resources/accounting/resources/delete_account/client.py +++ b/src/merge/resources/accounting/resources/delete_account/client.py @@ -38,9 +38,9 @@ def delete(self, *, request_options: typing.Optional[RequestOptions] = None) -> _response = self._client_wrapper.httpx_client.request( "accounting/v1/delete-account", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -66,20 +66,28 @@ async def delete(self, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.delete_account.delete() + + + async def main() -> None: + await client.accounting.delete_account.delete() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/delete-account", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/expenses/client.py b/src/merge/resources/accounting/resources/expenses/client.py index 22c0e0b2..87ab0041 100644 --- a/src/merge/resources/accounting/resources/expenses/client.py +++ b/src/merge/resources/accounting/resources/expenses/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.expense import Expense from ...types.expense_request import ExpenseRequest @@ -130,9 +130,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedExpenseList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedExpenseList, parse_obj_as(type_=PaginatedExpenseList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -188,9 +188,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExpenseResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ExpenseResponse, parse_obj_as(type_=ExpenseResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -243,9 +243,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(Expense, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Expense, parse_obj_as(type_=Expense, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -278,9 +278,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "accounting/v1/expenses/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -363,13 +363,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.expenses.list() + + + async def main() -> None: + await client.accounting.expenses.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/expenses", @@ -395,9 +403,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedExpenseList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedExpenseList, parse_obj_as(type_=PaginatedExpenseList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -434,6 +442,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.accounting import ExpenseRequest @@ -441,9 +451,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.expenses.create( - model=ExpenseRequest(), - ) + + + async def main() -> None: + await client.accounting.expenses.create( + model=ExpenseRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/expenses", @@ -453,9 +469,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExpenseResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ExpenseResponse, parse_obj_as(type_=ExpenseResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -492,15 +508,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.expenses.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.expenses.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/expenses/{jsonable_encoder(id)}", @@ -508,9 +532,9 @@ async def retrieve( params={"expand": expand, 
"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Expense, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Expense, parse_obj_as(type_=Expense, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -532,20 +556,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.expenses.meta_post_retrieve() + + + async def main() -> None: + await client.accounting.expenses.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/expenses/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/field_mapping/client.py b/src/merge/resources/accounting/resources/field_mapping/client.py index 26cbe708..12924c3e 100644 --- a/src/merge/resources/accounting/resources/field_mapping/client.py +++ b/src/merge/resources/accounting/resources/field_mapping/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.external_target_field_api_response import ExternalTargetFieldApiResponse from ...types.field_mapping_api_instance_response import FieldMappingApiInstanceResponse @@ -50,9 +50,9 @@ def field_mappings_retrieve( _response = self._client_wrapper.httpx_client.request( "accounting/v1/field-mappings", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingApiInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingApiInstanceResponse, parse_obj_as(type_=FieldMappingApiInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -131,9 +131,9 @@ def field_mappings_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -174,9 +174,9 @@ def field_mappings_destroy( 
method="DELETE", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -238,9 +238,9 @@ def field_mappings_partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -288,9 +288,9 @@ def remote_fields_retrieve( params={"common_models": common_models, "include_example_values": include_example_values}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteFieldApiResponse, parse_obj_as(type_=RemoteFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -325,9 +325,9 @@ def target_fields_retrieve( _response = self._client_wrapper.httpx_client.request( "accounting/v1/target-fields", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExternalTargetFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ExternalTargetFieldApiResponse, parse_obj_as(type_=ExternalTargetFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -356,20 +356,28 @@ async def field_mappings_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.field_mapping.field_mappings_retrieve() + + + async def main() -> None: + await client.accounting.field_mapping.field_mappings_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/field-mappings", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingApiInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingApiInstanceResponse, parse_obj_as(type_=FieldMappingApiInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -419,20 +427,28 @@ async def field_mappings_create( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await 
client.accounting.field_mapping.field_mappings_create( - target_field_name="example_target_field_name", - target_field_description="this is a example description of the target field", - remote_field_traversal_path=["example_remote_field"], - remote_method="GET", - remote_url_path="/example-url-path", - common_model_name="ExampleCommonModel", - ) + + + async def main() -> None: + await client.accounting.field_mapping.field_mappings_create( + target_field_name="example_target_field_name", + target_field_description="this is a example description of the target field", + remote_field_traversal_path=["example_remote_field"], + remote_method="GET", + remote_url_path="/example-url-path", + common_model_name="ExampleCommonModel", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/field-mappings", @@ -448,9 +464,9 @@ async def field_mappings_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -476,24 +492,32 @@ async def field_mappings_destroy( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.field_mapping.field_mappings_destroy( - field_mapping_id="field_mapping_id", - ) + + + async def main() -> None: + await client.accounting.field_mapping.field_mappings_destroy( + field_mapping_id="field_mapping_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/field-mappings/{jsonable_encoder(field_mapping_id)}", method="DELETE", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -534,15 +558,23 @@ async def field_mappings_partial_update( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.field_mapping.field_mappings_partial_update( - field_mapping_id="field_mapping_id", - ) + + + async def main() -> None: + await client.accounting.field_mapping.field_mappings_partial_update( + field_mapping_id="field_mapping_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/field-mappings/{jsonable_encoder(field_mapping_id)}", @@ -555,9 +587,9 @@ async def field_mappings_partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, 
parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -591,13 +623,21 @@ async def remote_fields_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.field_mapping.remote_fields_retrieve() + + + async def main() -> None: + await client.accounting.field_mapping.remote_fields_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/remote-fields", @@ -605,9 +645,9 @@ async def remote_fields_retrieve( params={"common_models": common_models, "include_example_values": include_example_values}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteFieldApiResponse, parse_obj_as(type_=RemoteFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -631,20 +671,28 @@ async def target_fields_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.field_mapping.target_fields_retrieve() + + + async def main() -> None: + await client.accounting.field_mapping.target_fields_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/target-fields", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExternalTargetFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ExternalTargetFieldApiResponse, parse_obj_as(type_=ExternalTargetFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/force_resync/client.py b/src/merge/resources/accounting/resources/force_resync/client.py index c2e6aba6..a3536187 100644 --- a/src/merge/resources/accounting/resources/force_resync/client.py +++ b/src/merge/resources/accounting/resources/force_resync/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.sync_status import SyncStatus @@ -43,9 +43,9 @@ def sync_status_resync_create( _response = self._client_wrapper.httpx_client.request( "accounting/v1/sync-status/resync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[SyncStatus], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[SyncStatus], parse_obj_as(type_=typing.List[SyncStatus], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise 
ApiError(status_code=_response.status_code, body=_response.text) @@ -74,20 +74,28 @@ async def sync_status_resync_create( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.force_resync.sync_status_resync_create() + + + async def main() -> None: + await client.accounting.force_resync.sync_status_resync_create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/sync-status/resync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[SyncStatus], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[SyncStatus], parse_obj_as(type_=typing.List[SyncStatus], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/generate_key/client.py b/src/merge/resources/accounting/resources/generate_key/client.py index 2b75f549..56dbb7cf 100644 --- a/src/merge/resources/accounting/resources/generate_key/client.py +++ b/src/merge/resources/accounting/resources/generate_key/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.remote_key import RemoteKey @@ -49,9 +49,9 @@ def create(self, *, name: str, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "accounting/v1/generate-key", method="POST", json={"name": name}, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -81,22 +81,30 @@ async def create(self, *, name: str, request_options: typing.Optional[RequestOpt Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.generate_key.create( - name="Remote Deployment Key 1", - ) + + + async def main() -> None: + await client.accounting.generate_key.create( + name="Remote Deployment Key 1", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/generate-key", method="POST", json={"name": name}, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/income_statements/client.py 
b/src/merge/resources/accounting/resources/income_statements/client.py index da97a2e1..81966255 100644 --- a/src/merge/resources/accounting/resources/income_statements/client.py +++ b/src/merge/resources/accounting/resources/income_statements/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.income_statement import IncomeStatement from ...types.paginated_income_statement_list import PaginatedIncomeStatementList @@ -108,9 +108,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedIncomeStatementList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedIncomeStatementList, parse_obj_as(type_=PaginatedIncomeStatementList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -163,9 +163,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(IncomeStatement, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(IncomeStatement, parse_obj_as(type_=IncomeStatement, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -240,13 +240,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.income_statements.list() + + + async def main() -> None: + await client.accounting.income_statements.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/income-statements", @@ -266,9 +274,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedIncomeStatementList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedIncomeStatementList, parse_obj_as(type_=PaginatedIncomeStatementList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -305,15 +313,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.income_statements.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.income_statements.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/income-statements/{jsonable_encoder(id)}", @@ -321,9 +337,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(IncomeStatement, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(IncomeStatement, parse_obj_as(type_=IncomeStatement, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/invoices/client.py b/src/merge/resources/accounting/resources/invoices/client.py index d4e99d9c..a629d0c0 100644 --- a/src/merge/resources/accounting/resources/invoices/client.py +++ b/src/merge/resources/accounting/resources/invoices/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.invoice import Invoice from ...types.invoice_request import InvoiceRequest @@ -150,9 +150,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedInvoiceList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedInvoiceList, parse_obj_as(type_=PaginatedInvoiceList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -208,9 +208,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(InvoiceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(InvoiceResponse, parse_obj_as(type_=InvoiceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -276,9 +276,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Invoice, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Invoice, parse_obj_as(type_=Invoice, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -338,9 +338,9 @@ def partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(InvoiceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(InvoiceResponse, parse_obj_as(type_=InvoiceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -377,9 +377,9 @@ def meta_patch_retrieve(self, id: str, *, request_options: typing.Optional[Reque _response = self._client_wrapper.httpx_client.request( f"accounting/v1/invoices/meta/patch/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, 
parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -412,9 +412,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "accounting/v1/invoices/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -516,13 +516,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.invoices.list() + + + async def main() -> None: + await client.accounting.invoices.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/invoices", @@ -548,9 +556,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedInvoiceList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedInvoiceList, parse_obj_as(type_=PaginatedInvoiceList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -587,6 +595,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.accounting import InvoiceRequest @@ -594,9 +604,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.invoices.create( - model=InvoiceRequest(), - ) + + + async def main() -> None: + await client.accounting.invoices.create( + model=InvoiceRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/invoices", @@ -606,9 +622,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(InvoiceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(InvoiceResponse, parse_obj_as(type_=InvoiceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -653,15 +669,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.invoices.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.invoices.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/invoices/{jsonable_encoder(id)}", @@ -674,9 +698,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Invoice, _response.json()) # type: ignore try: + if 200 <= 
_response.status_code < 300: + return typing.cast(Invoice, parse_obj_as(type_=Invoice, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -716,6 +740,8 @@ async def partial_update( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.accounting import InvoiceRequest @@ -723,10 +749,16 @@ async def partial_update( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.invoices.partial_update( - id="id", - model=InvoiceRequest(), - ) + + + async def main() -> None: + await client.accounting.invoices.partial_update( + id="id", + model=InvoiceRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/invoices/{jsonable_encoder(id)}", @@ -736,9 +768,9 @@ async def partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(InvoiceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(InvoiceResponse, parse_obj_as(type_=InvoiceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -764,22 +796,30 @@ async def meta_patch_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.invoices.meta_patch_retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.invoices.meta_patch_retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/invoices/meta/patch/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -801,20 +841,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.invoices.meta_post_retrieve() + + + async def main() -> None: + await client.accounting.invoices.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/invoices/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/issues/client.py b/src/merge/resources/accounting/resources/issues/client.py index 03bd1ef6..7e45581f 100644 --- 
a/src/merge/resources/accounting/resources/issues/client.py +++ b/src/merge/resources/accounting/resources/issues/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.issue import Issue from ...types.paginated_issue_list import PaginatedIssueList @@ -127,9 +127,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedIssueList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedIssueList, parse_obj_as(type_=PaginatedIssueList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -166,9 +166,9 @@ def retrieve(self, id: str, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( f"accounting/v1/issues/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Issue, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Issue, parse_obj_as(type_=Issue, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -251,13 +251,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.issues.list() + + + async def main() -> None: + await client.accounting.issues.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/issues", @@ -287,9 +295,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedIssueList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedIssueList, parse_obj_as(type_=PaginatedIssueList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -313,22 +321,30 @@ async def retrieve(self, id: str, *, request_options: typing.Optional[RequestOpt Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.issues.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.issues.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/issues/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Issue, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Issue, parse_obj_as(type_=Issue, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise 
ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/items/client.py b/src/merge/resources/accounting/resources/items/client.py index 86396a9e..5f9519da 100644 --- a/src/merge/resources/accounting/resources/items/client.py +++ b/src/merge/resources/accounting/resources/items/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.item import Item from ...types.paginated_item_list import PaginatedItemList @@ -120,9 +120,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedItemList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedItemList, parse_obj_as(type_=PaginatedItemList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -188,9 +188,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Item, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Item, parse_obj_as(type_=Item, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -273,13 +273,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.items.list() + + + async def main() -> None: + await client.accounting.items.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/items", @@ -301,9 +309,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedItemList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedItemList, parse_obj_as(type_=PaginatedItemList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -348,15 +356,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.items.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.items.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/items/{jsonable_encoder(id)}", @@ -369,9 +385,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Item, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Item, parse_obj_as(type_=Item, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise 
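Note: every response handler in these files is also reshaped the same way: the `if 200 <= _response.status_code < 300` success branch moves inside the `try`, so a body that fails to decode as JSON, even on a 2xx response, is reported as an `ApiError` instead of escaping as a raw `JSONDecodeError`. A minimal standalone sketch of that control flow, with a stand-in `ApiError` rather than the SDK's class from `.....core.api_error`:

# Sketch only: stand-in ApiError and a bare httpx.Response; the SDK's real
# handlers parse the success body into a typed model before returning it.
import typing
from json import JSONDecodeError

import httpx


class ApiError(Exception):
    def __init__(self, *, status_code: int, body: typing.Any) -> None:
        super().__init__(f"status_code: {status_code}, body: {body}")
        self.status_code = status_code
        self.body = body


def handle_response(_response: httpx.Response) -> typing.Any:
    try:
        if 200 <= _response.status_code < 300:
            # Decoding a malformed 2xx body now also lands in the except below.
            return _response.json()
        _response_json = _response.json()
    except JSONDecodeError:
        raise ApiError(status_code=_response.status_code, body=_response.text)
    raise ApiError(status_code=_response.status_code, body=_response_json)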
ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/journal_entries/client.py b/src/merge/resources/accounting/resources/journal_entries/client.py index a8ac58cb..baf40083 100644 --- a/src/merge/resources/accounting/resources/journal_entries/client.py +++ b/src/merge/resources/accounting/resources/journal_entries/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.journal_entry import JournalEntry from ...types.journal_entry_request import JournalEntryRequest @@ -130,9 +130,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedJournalEntryList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedJournalEntryList, parse_obj_as(type_=PaginatedJournalEntryList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -188,9 +188,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(JournalEntryResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(JournalEntryResponse, parse_obj_as(type_=JournalEntryResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -243,9 +243,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(JournalEntry, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(JournalEntry, parse_obj_as(type_=JournalEntry, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -278,9 +278,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "accounting/v1/journal-entries/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -363,13 +363,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.journal_entries.list() + + + async def main() -> None: + await client.accounting.journal_entries.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/journal-entries", @@ -395,9 
+403,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedJournalEntryList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedJournalEntryList, parse_obj_as(type_=PaginatedJournalEntryList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -434,6 +442,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.accounting import JournalEntryRequest @@ -441,9 +451,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.journal_entries.create( - model=JournalEntryRequest(), - ) + + + async def main() -> None: + await client.accounting.journal_entries.create( + model=JournalEntryRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/journal-entries", @@ -453,9 +469,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(JournalEntryResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(JournalEntryResponse, parse_obj_as(type_=JournalEntryResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -492,15 +508,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.journal_entries.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.journal_entries.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/journal-entries/{jsonable_encoder(id)}", @@ -508,9 +532,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(JournalEntry, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(JournalEntry, parse_obj_as(type_=JournalEntry, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -532,20 +556,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.journal_entries.meta_post_retrieve() + + + async def main() -> None: + await client.accounting.journal_entries.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/journal-entries/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # 
type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/link_token/client.py b/src/merge/resources/accounting/resources/link_token/client.py index 9c69fca1..8fb3667b 100644 --- a/src/merge/resources/accounting/resources/link_token/client.py +++ b/src/merge/resources/accounting/resources/link_token/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.categories_enum import CategoriesEnum from ...types.common_model_scopes_body_request import CommonModelScopesBodyRequest @@ -119,9 +119,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LinkToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(LinkToken, parse_obj_as(type_=LinkToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -198,6 +198,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.accounting import CategoriesEnum @@ -205,12 +207,18 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.link_token.create( - end_user_email_address="example@gmail.com", - end_user_organization_name="Test Organization", - end_user_origin_id="12345", - categories=[CategoriesEnum.HRIS, CategoriesEnum.ATS], - ) + + + async def main() -> None: + await client.accounting.link_token.create( + end_user_email_address="example@gmail.com", + end_user_organization_name="Test Organization", + end_user_origin_id="12345", + categories=[CategoriesEnum.HRIS, CategoriesEnum.ATS], + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/link-token", @@ -231,9 +239,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LinkToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(LinkToken, parse_obj_as(type_=LinkToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/linked_accounts/client.py b/src/merge/resources/accounting/resources/linked_accounts/client.py index 131507fd..b9d6ce2e 100644 --- a/src/merge/resources/accounting/resources/linked_accounts/client.py +++ b/src/merge/resources/accounting/resources/linked_accounts/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_account_details_and_actions_list import PaginatedAccountDetailsAndActionsList from .types.linked_accounts_list_request_category import LinkedAccountsListRequestCategory @@ -122,9 +122,9 @@ def 
list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountDetailsAndActionsList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountDetailsAndActionsList, parse_obj_as(type_=PaginatedAccountDetailsAndActionsList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -214,13 +214,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.linked_accounts.list() + + + async def main() -> None: + await client.accounting.linked_accounts.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/linked-accounts", @@ -242,9 +250,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountDetailsAndActionsList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountDetailsAndActionsList, parse_obj_as(type_=PaginatedAccountDetailsAndActionsList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/passthrough/client.py b/src/merge/resources/accounting/resources/passthrough/client.py index a9fe343d..c4c9b33b 100644 --- a/src/merge/resources/accounting/resources/passthrough/client.py +++ b/src/merge/resources/accounting/resources/passthrough/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.data_passthrough_request import DataPassthroughRequest from ...types.remote_response import RemoteResponse @@ -55,9 +55,9 @@ def create( _response = self._client_wrapper.httpx_client.request( "accounting/v1/passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -88,6 +88,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.accounting import DataPassthroughRequest, MethodEnum @@ -95,19 +97,25 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.passthrough.create( - request=DataPassthroughRequest( - method=MethodEnum.GET, - path="/scooters", - ), - ) + + + async def main() -> None: + await client.accounting.passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( 
"accounting/v1/passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/payments/client.py b/src/merge/resources/accounting/resources/payments/client.py index d6b66e83..6c5bc401 100644 --- a/src/merge/resources/accounting/resources/payments/client.py +++ b/src/merge/resources/accounting/resources/payments/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.meta_response import MetaResponse from ...types.paginated_payment_list import PaginatedPaymentList @@ -141,9 +141,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedPaymentList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedPaymentList, parse_obj_as(type_=PaginatedPaymentList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -199,9 +199,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaymentResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaymentResponse, parse_obj_as(type_=PaymentResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -254,9 +254,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Payment, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Payment, parse_obj_as(type_=Payment, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -316,9 +316,9 @@ def partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaymentResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaymentResponse, parse_obj_as(type_=PaymentResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -355,9 +355,9 @@ def meta_patch_retrieve(self, id: str, *, request_options: typing.Optional[Reque _response = self._client_wrapper.httpx_client.request( f"accounting/v1/payments/meta/patch/{jsonable_encoder(id)}", method="GET", 
request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -390,9 +390,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "accounting/v1/payments/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -483,13 +483,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.payments.list() + + + async def main() -> None: + await client.accounting.payments.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/payments", @@ -517,9 +525,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedPaymentList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedPaymentList, parse_obj_as(type_=PaginatedPaymentList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -556,6 +564,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.accounting import PaymentRequest @@ -563,9 +573,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.payments.create( - model=PaymentRequest(), - ) + + + async def main() -> None: + await client.accounting.payments.create( + model=PaymentRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/payments", @@ -575,9 +591,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaymentResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaymentResponse, parse_obj_as(type_=PaymentResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -614,15 +630,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.payments.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.payments.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( 
f"accounting/v1/payments/{jsonable_encoder(id)}", @@ -630,9 +654,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Payment, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Payment, parse_obj_as(type_=Payment, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -672,6 +696,8 @@ async def partial_update( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.accounting import PatchedPaymentRequest @@ -679,10 +705,16 @@ async def partial_update( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.payments.partial_update( - id="id", - model=PatchedPaymentRequest(), - ) + + + async def main() -> None: + await client.accounting.payments.partial_update( + id="id", + model=PatchedPaymentRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/payments/{jsonable_encoder(id)}", @@ -692,9 +724,9 @@ async def partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaymentResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaymentResponse, parse_obj_as(type_=PaymentResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -720,22 +752,30 @@ async def meta_patch_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.payments.meta_patch_retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.payments.meta_patch_retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/payments/meta/patch/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -757,20 +797,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.payments.meta_post_retrieve() + + + async def main() -> None: + await client.accounting.payments.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/payments/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, 
parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/phone_numbers/client.py b/src/merge/resources/accounting/resources/phone_numbers/client.py index 1e24bd42..df190cd3 100644 --- a/src/merge/resources/accounting/resources/phone_numbers/client.py +++ b/src/merge/resources/accounting/resources/phone_numbers/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.accounting_phone_number import AccountingPhoneNumber @@ -58,9 +58,9 @@ def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountingPhoneNumber, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountingPhoneNumber, parse_obj_as(type_=AccountingPhoneNumber, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -98,15 +98,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.phone_numbers.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.phone_numbers.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/phone-numbers/{jsonable_encoder(id)}", @@ -114,9 +122,9 @@ async def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountingPhoneNumber, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountingPhoneNumber, parse_obj_as(type_=AccountingPhoneNumber, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/purchase_orders/client.py b/src/merge/resources/accounting/resources/purchase_orders/client.py index 6de5b0fa..f80a5f19 100644 --- a/src/merge/resources/accounting/resources/purchase_orders/client.py +++ b/src/merge/resources/accounting/resources/purchase_orders/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.meta_response import MetaResponse from ...types.paginated_purchase_order_list import PaginatedPurchaseOrderList @@ -136,9 +136,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedPurchaseOrderList, 
_response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedPurchaseOrderList, parse_obj_as(type_=PaginatedPurchaseOrderList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -194,9 +194,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PurchaseOrderResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PurchaseOrderResponse, parse_obj_as(type_=PurchaseOrderResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -262,9 +262,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PurchaseOrder, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PurchaseOrder, parse_obj_as(type_=PurchaseOrder, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -297,9 +297,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "accounting/v1/purchase-orders/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -390,13 +390,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.purchase_orders.list() + + + async def main() -> None: + await client.accounting.purchase_orders.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/purchase-orders", @@ -420,9 +428,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedPurchaseOrderList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedPurchaseOrderList, parse_obj_as(type_=PaginatedPurchaseOrderList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -459,6 +467,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.accounting import PurchaseOrderRequest @@ -466,9 +476,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.purchase_orders.create( - model=PurchaseOrderRequest(), - ) + + + async def main() -> None: + await client.accounting.purchase_orders.create( + model=PurchaseOrderRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( 
"accounting/v1/purchase-orders", @@ -478,9 +494,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PurchaseOrderResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PurchaseOrderResponse, parse_obj_as(type_=PurchaseOrderResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -525,15 +541,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.purchase_orders.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.purchase_orders.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/purchase-orders/{jsonable_encoder(id)}", @@ -546,9 +570,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PurchaseOrder, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PurchaseOrder, parse_obj_as(type_=PurchaseOrder, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -570,20 +594,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.purchase_orders.meta_post_retrieve() + + + async def main() -> None: + await client.accounting.purchase_orders.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/purchase-orders/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/regenerate_key/client.py b/src/merge/resources/accounting/resources/regenerate_key/client.py index af66f7d1..5e42be6e 100644 --- a/src/merge/resources/accounting/resources/regenerate_key/client.py +++ b/src/merge/resources/accounting/resources/regenerate_key/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.remote_key import RemoteKey @@ -53,9 +53,9 @@ def create(self, *, name: str, request_options: typing.Optional[RequestOptions] request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return 
typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -85,15 +85,23 @@ async def create(self, *, name: str, request_options: typing.Optional[RequestOpt Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.regenerate_key.create( - name="Remote Deployment Key 1", - ) + + + async def main() -> None: + await client.accounting.regenerate_key.create( + name="Remote Deployment Key 1", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/regenerate-key", @@ -102,9 +110,9 @@ async def create(self, *, name: str, request_options: typing.Optional[RequestOpt request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/scopes/client.py b/src/merge/resources/accounting/resources/scopes/client.py index 93018a58..d6721e33 100644 --- a/src/merge/resources/accounting/resources/scopes/client.py +++ b/src/merge/resources/accounting/resources/scopes/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.common_model_scope_api import CommonModelScopeApi from ...types.individual_common_model_scope_deserializer_request import IndividualCommonModelScopeDeserializerRequest @@ -47,9 +47,9 @@ def default_scopes_retrieve( _response = self._client_wrapper.httpx_client.request( "accounting/v1/default-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -84,9 +84,9 @@ def linked_account_scopes_retrieve( _response = self._client_wrapper.httpx_client.request( "accounting/v1/linked-account-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -157,9 +157,9 @@ def linked_account_scopes_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, 
_response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -188,20 +188,28 @@ async def default_scopes_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.scopes.default_scopes_retrieve() + + + async def main() -> None: + await client.accounting.scopes.default_scopes_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/default-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -225,20 +233,28 @@ async def linked_account_scopes_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.scopes.linked_account_scopes_retrieve() + + + async def main() -> None: + await client.accounting.scopes.linked_account_scopes_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/linked-account-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -268,6 +284,8 @@ async def linked_account_scopes_create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.accounting import ( IndividualCommonModelScopeDeserializerRequest, @@ -278,29 +296,35 @@ async def linked_account_scopes_create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.scopes.linked_account_scopes_create( - common_models=[ - IndividualCommonModelScopeDeserializerRequest( - model_name="Employee", - model_permissions={ - "READ": ModelPermissionDeserializerRequest( - is_enabled=True, - ), - "WRITE": ModelPermissionDeserializerRequest( - is_enabled=False, - ), - }, - ), - IndividualCommonModelScopeDeserializerRequest( - model_name="Benefit", - model_permissions={ - "WRITE": ModelPermissionDeserializerRequest( - is_enabled=False, - ) - }, - ), - ], - ) + + + async def main() -> None: + await client.accounting.scopes.linked_account_scopes_create( + common_models=[ + IndividualCommonModelScopeDeserializerRequest( + model_name="Employee", + model_permissions={ + "READ": ModelPermissionDeserializerRequest( + is_enabled=True, + ), + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ), + }, + ), + IndividualCommonModelScopeDeserializerRequest( + 
model_name="Benefit", + model_permissions={ + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ) + }, + ), + ], + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/linked-account-scopes", @@ -309,9 +333,9 @@ async def linked_account_scopes_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/sync_status/client.py b/src/merge/resources/accounting/resources/sync_status/client.py index 939f6209..426d673f 100644 --- a/src/merge/resources/accounting/resources/sync_status/client.py +++ b/src/merge/resources/accounting/resources/sync_status/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_sync_status_list import PaginatedSyncStatusList @@ -56,9 +56,9 @@ def list( params={"cursor": cursor, "page_size": page_size}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedSyncStatusList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedSyncStatusList, parse_obj_as(type_=PaginatedSyncStatusList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -97,13 +97,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.sync_status.list() + + + async def main() -> None: + await client.accounting.sync_status.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/sync-status", @@ -111,9 +119,9 @@ async def list( params={"cursor": cursor, "page_size": page_size}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedSyncStatusList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedSyncStatusList, parse_obj_as(type_=PaginatedSyncStatusList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/tax_rates/client.py b/src/merge/resources/accounting/resources/tax_rates/client.py index 099d1b4c..3a2860f0 100644 --- a/src/merge/resources/accounting/resources/tax_rates/client.py +++ b/src/merge/resources/accounting/resources/tax_rates/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from 
.....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_tax_rate_list import PaginatedTaxRateList from ...types.tax_rate import TaxRate @@ -108,9 +108,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTaxRateList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTaxRateList, parse_obj_as(type_=PaginatedTaxRateList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -163,9 +163,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TaxRate, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TaxRate, parse_obj_as(type_=TaxRate, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -240,13 +240,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.tax_rates.list() + + + async def main() -> None: + await client.accounting.tax_rates.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/tax-rates", @@ -266,9 +274,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTaxRateList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTaxRateList, parse_obj_as(type_=PaginatedTaxRateList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -305,15 +313,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.tax_rates.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.tax_rates.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/tax-rates/{jsonable_encoder(id)}", @@ -321,9 +337,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TaxRate, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TaxRate, parse_obj_as(type_=TaxRate, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/tracking_categories/client.py b/src/merge/resources/accounting/resources/tracking_categories/client.py index 6c98fad9..f9105983 100644 --- a/src/merge/resources/accounting/resources/tracking_categories/client.py +++ 
b/src/merge/resources/accounting/resources/tracking_categories/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_tracking_category_list import PaginatedTrackingCategoryList from ...types.tracking_category import TrackingCategory @@ -118,9 +118,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTrackingCategoryList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTrackingCategoryList, parse_obj_as(type_=PaginatedTrackingCategoryList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -186,9 +186,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TrackingCategory, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TrackingCategory, parse_obj_as(type_=TrackingCategory, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -271,13 +271,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.tracking_categories.list() + + + async def main() -> None: + await client.accounting.tracking_categories.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/tracking-categories", @@ -299,9 +307,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTrackingCategoryList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTrackingCategoryList, parse_obj_as(type_=PaginatedTrackingCategoryList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -346,15 +354,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.tracking_categories.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.tracking_categories.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/tracking-categories/{jsonable_encoder(id)}", @@ -367,9 +383,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TrackingCategory, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TrackingCategory, parse_obj_as(type_=TrackingCategory, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise 
ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/transactions/client.py b/src/merge/resources/accounting/resources/transactions/client.py index 7b1225b2..02b829f8 100644 --- a/src/merge/resources/accounting/resources/transactions/client.py +++ b/src/merge/resources/accounting/resources/transactions/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_transaction_list import PaginatedTransactionList from ...types.transaction import Transaction @@ -124,9 +124,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTransactionList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTransactionList, parse_obj_as(type_=PaginatedTransactionList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -179,9 +179,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Transaction, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Transaction, parse_obj_as(type_=Transaction, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -264,13 +264,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.transactions.list() + + + async def main() -> None: + await client.accounting.transactions.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/transactions", @@ -296,9 +304,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTransactionList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTransactionList, parse_obj_as(type_=PaginatedTransactionList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -335,15 +343,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.transactions.retrieve( - id="id", - ) + + + async def main() -> None: + await client.accounting.transactions.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/transactions/{jsonable_encoder(id)}", @@ -351,9 +367,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= 
_response.status_code < 300: - return pydantic_v1.parse_obj_as(Transaction, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Transaction, parse_obj_as(type_=Transaction, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/vendor_credits/client.py b/src/merge/resources/accounting/resources/vendor_credits/client.py index fb507d55..a8471ae2 100644 --- a/src/merge/resources/accounting/resources/vendor_credits/client.py +++ b/src/merge/resources/accounting/resources/vendor_credits/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_vendor_credit_list import PaginatedVendorCreditList from ...types.vendor_credit import VendorCredit @@ -124,9 +124,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedVendorCreditList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedVendorCreditList, parse_obj_as(type_=PaginatedVendorCreditList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -179,9 +179,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(VendorCredit, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(VendorCredit, parse_obj_as(type_=VendorCredit, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -264,13 +264,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.vendor_credits.list() + + + async def main() -> None: + await client.accounting.vendor_credits.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/vendor-credits", @@ -296,9 +304,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedVendorCreditList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedVendorCreditList, parse_obj_as(type_=PaginatedVendorCreditList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -335,15 +343,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.vendor_credits.retrieve( - id="id", - ) + + + async def main() -> None: + await 
client.accounting.vendor_credits.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"accounting/v1/vendor-credits/{jsonable_encoder(id)}", @@ -351,9 +367,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(VendorCredit, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(VendorCredit, parse_obj_as(type_=VendorCredit, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/resources/webhook_receivers/client.py b/src/merge/resources/accounting/resources/webhook_receivers/client.py index 10025f02..9db5178b 100644 --- a/src/merge/resources/accounting/resources/webhook_receivers/client.py +++ b/src/merge/resources/accounting/resources/webhook_receivers/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.webhook_receiver import WebhookReceiver @@ -44,9 +44,9 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty _response = self._client_wrapper.httpx_client.request( "accounting/v1/webhook-receivers", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[WebhookReceiver], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[WebhookReceiver], parse_obj_as(type_=typing.List[WebhookReceiver], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -99,9 +99,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(WebhookReceiver, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(WebhookReceiver, parse_obj_as(type_=WebhookReceiver, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -128,20 +128,28 @@ async def list(self, *, request_options: typing.Optional[RequestOptions] = None) Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.webhook_receivers.list() + + + async def main() -> None: + await client.accounting.webhook_receivers.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/webhook-receivers", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[WebhookReceiver], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[WebhookReceiver], parse_obj_as(type_=typing.List[WebhookReceiver], object_=_response.json())) # type: ignore _response_json = _response.json() 
except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -176,16 +184,24 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.accounting.webhook_receivers.create( - event="event", - is_active=True, - ) + + + async def main() -> None: + await client.accounting.webhook_receivers.create( + event="event", + is_active=True, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "accounting/v1/webhook-receivers", @@ -194,9 +210,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(WebhookReceiver, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(WebhookReceiver, parse_obj_as(type_=WebhookReceiver, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/accounting/types/account.py b/src/merge/resources/accounting/types/account.py index c983b39b..8a80f75a 100644 --- a/src/merge/resources/accounting/types/account.py +++ b/src/merge/resources/accounting/types/account.py @@ -3,15 +3,16 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_classification import AccountClassification from .account_currency import AccountCurrency from .account_status import AccountStatus from .remote_data import RemoteData -class Account(pydantic_v1.BaseModel): +class Account(UniversalBaseModel): """ # The Account Object @@ -33,32 +34,32 @@ class Account(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The account's name. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The account's description. """ - classification: typing.Optional[AccountClassification] = pydantic_v1.Field() + classification: typing.Optional[AccountClassification] = pydantic.Field() """ The account's broadest grouping. @@ -69,12 +70,12 @@ class Account(pydantic_v1.BaseModel): - `REVENUE` - REVENUE """ - type: typing.Optional[str] = pydantic_v1.Field() + type: typing.Optional[str] = pydantic.Field() """ The account's type is a narrower and more specific grouping within the account's classification. """ - status: typing.Optional[AccountStatus] = pydantic_v1.Field() + status: typing.Optional[AccountStatus] = pydantic.Field() """ The account's status. 
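The async docstring examples in the client diffs above all move from a bare top-level await to an async def main() wrapped in asyncio.run(), since top-level await is not valid in a plain Python script. A minimal, self-contained sketch of that pattern, using placeholder credentials and the tax-rates endpoint shown elsewhere in this patch:

import asyncio

from merge.client import AsyncMerge

client = AsyncMerge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)


async def main() -> None:
    # Any awaited SDK call goes inside the coroutine; asyncio.run() drives it.
    await client.accounting.tax_rates.list()


asyncio.run(main())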
@@ -83,12 +84,12 @@ class Account(pydantic_v1.BaseModel): - `INACTIVE` - INACTIVE """ - current_balance: typing.Optional[float] = pydantic_v1.Field() + current_balance: typing.Optional[float] = pydantic.Field() """ The account's current balance. """ - currency: typing.Optional[AccountCurrency] = pydantic_v1.Field() + currency: typing.Optional[AccountCurrency] = pydantic.Field() """ The account's currency. @@ -400,22 +401,22 @@ class Account(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - account_number: typing.Optional[str] = pydantic_v1.Field() + account_number: typing.Optional[str] = pydantic.Field() """ The account's number. """ - parent_account: typing.Optional[str] = pydantic_v1.Field() + parent_account: typing.Optional[str] = pydantic.Field() """ ID of the parent account. """ - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The company the account belongs to. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -423,20 +424,11 @@ class Account(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/account_details.py b/src/merge/resources/accounting/types/account_details.py index 81f9587b..58cd348a 100644 --- a/src/merge/resources/accounting/types/account_details.py +++ b/src/merge/resources/accounting/types/account_details.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. 
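The Account model above shows the shape every regenerated type now follows: subclass UniversalBaseModel, declare fields with pydantic.Field(), and branch on IS_PYDANTIC_V2 to pick either a Pydantic v2 model_config or a v1-style Config class. A minimal sketch of that pattern applied to a hypothetical model (not part of the SDK); the absolute import path is assumed from the relative imports used in this patch, and a default of None is made explicit here for the optional field:

import typing

import pydantic

from merge.core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel  # path assumed


class ExampleModel(UniversalBaseModel):  # hypothetical model following the regenerated pattern
    name: typing.Optional[str] = pydantic.Field(default=None)
    """
    A free-form name, documented field-by-field the way the generated models are.
    """

    if IS_PYDANTIC_V2:
        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
    else:

        class Config:
            frozen = True
            smart_union = True
            extra = pydantic.Extra.allow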
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .category_enum import CategoryEnum -class AccountDetails(pydantic_v1.BaseModel): +class AccountDetails(UniversalBaseModel): id: typing.Optional[str] integration: typing.Optional[str] integration_slug: typing.Optional[str] @@ -18,27 +18,18 @@ class AccountDetails(pydantic_v1.BaseModel): end_user_email_address: typing.Optional[str] status: typing.Optional[str] webhook_listener_url: typing.Optional[str] - is_duplicate: typing.Optional[bool] = pydantic_v1.Field() + is_duplicate: typing.Optional[bool] = pydantic.Field() """ Whether a Production Linked Account's credentials match another existing Production Linked Account. This field is `null` for Test Linked Accounts, incomplete Production Linked Accounts, and ignored duplicate Production Linked Account sets. """ account_type: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/account_details_and_actions.py b/src/merge/resources/accounting/types/account_details_and_actions.py index 6a1b2874..ffc38f58 100644 --- a/src/merge/resources/accounting/types/account_details_and_actions.py +++ b/src/merge/resources/accounting/types/account_details_and_actions.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_details_and_actions_integration import AccountDetailsAndActionsIntegration from .account_details_and_actions_status_enum import AccountDetailsAndActionsStatusEnum from .category_enum import CategoryEnum -class AccountDetailsAndActions(pydantic_v1.BaseModel): +class AccountDetailsAndActions(UniversalBaseModel): """ # The LinkedAccount Object @@ -30,13 +30,13 @@ class AccountDetailsAndActions(pydantic_v1.BaseModel): end_user_origin_id: typing.Optional[str] end_user_organization_name: str end_user_email_address: str - subdomain: typing.Optional[str] = pydantic_v1.Field() + subdomain: typing.Optional[str] = pydantic.Field() """ The tenant or domain the customer has provided access to. 
""" webhook_listener_url: str - is_duplicate: typing.Optional[bool] = pydantic_v1.Field() + is_duplicate: typing.Optional[bool] = pydantic.Field() """ Whether a Production Linked Account's credentials match another existing Production Linked Account. This field is `null` for Test Linked Accounts, incomplete Production Linked Accounts, and ignored duplicate Production Linked Account sets. """ @@ -44,20 +44,11 @@ class AccountDetailsAndActions(pydantic_v1.BaseModel): integration: typing.Optional[AccountDetailsAndActionsIntegration] account_type: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/account_details_and_actions_integration.py b/src/merge/resources/accounting/types/account_details_and_actions_integration.py index 7c300a2b..8aa63726 100644 --- a/src/merge/resources/accounting/types/account_details_and_actions_integration.py +++ b/src/merge/resources/accounting/types/account_details_and_actions_integration.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .categories_enum import CategoriesEnum from .model_operation import ModelOperation -class AccountDetailsAndActionsIntegration(pydantic_v1.BaseModel): +class AccountDetailsAndActionsIntegration(UniversalBaseModel): name: str categories: typing.List[CategoriesEnum] image: typing.Optional[str] @@ -19,20 +19,11 @@ class AccountDetailsAndActionsIntegration(pydantic_v1.BaseModel): passthrough_available: bool available_model_operations: typing.Optional[typing.List[ModelOperation]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/account_integration.py b/src/merge/resources/accounting/types/account_integration.py index 57006e35..7a8a27ab 100644 --- a/src/merge/resources/accounting/types/account_integration.py +++ b/src/merge/resources/accounting/types/account_integration.py @@ -1,69 +1,60 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .categories_enum import CategoriesEnum -class AccountIntegration(pydantic_v1.BaseModel): - name: str = pydantic_v1.Field() +class AccountIntegration(UniversalBaseModel): + name: str = pydantic.Field() """ Company name. """ - categories: typing.Optional[typing.List[CategoriesEnum]] = pydantic_v1.Field() + categories: typing.Optional[typing.List[CategoriesEnum]] = pydantic.Field() """ Category or categories this integration belongs to. Multiple categories should be comma separated, i.e. [ats, hris]. """ - image: typing.Optional[str] = pydantic_v1.Field() + image: typing.Optional[str] = pydantic.Field() """ Company logo in rectangular shape. Upload an image with a clear background. """ - square_image: typing.Optional[str] = pydantic_v1.Field() + square_image: typing.Optional[str] = pydantic.Field() """ Company logo in square shape. Upload an image with a white background. """ - color: typing.Optional[str] = pydantic_v1.Field() + color: typing.Optional[str] = pydantic.Field() """ The color of this integration used for buttons and text throughout the app and landing pages. Choose a darker, saturated color. 
""" slug: typing.Optional[str] - api_endpoints_to_documentation_urls: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + api_endpoints_to_documentation_urls: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ Mapping of API endpoints to documentation urls for support. Example: {'GET': [['/common-model-scopes', 'https://docs.merge.dev/accounting/common-model-scopes/#common_model_scopes_retrieve'],['/common-model-actions', 'https://docs.merge.dev/accounting/common-model-actions/#common_model_actions_retrieve']], 'POST': []} """ - webhook_setup_guide_url: typing.Optional[str] = pydantic_v1.Field() + webhook_setup_guide_url: typing.Optional[str] = pydantic.Field() """ Setup guide URL for third party webhook creation. Exposed in Merge Docs. """ - category_beta_status: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + category_beta_status: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ Category or categories this integration is in beta status for. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/account_request.py b/src/merge/resources/accounting/types/account_request.py index ad855d9e..5aaa7e4d 100644 --- a/src/merge/resources/accounting/types/account_request.py +++ b/src/merge/resources/accounting/types/account_request.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_request_classification import AccountRequestClassification from .account_request_currency import AccountRequestCurrency from .account_request_status import AccountRequestStatus -class AccountRequest(pydantic_v1.BaseModel): +class AccountRequest(UniversalBaseModel): """ # The Account Object @@ -31,17 +31,17 @@ class AccountRequest(pydantic_v1.BaseModel): Fetch from the `LIST Accounts` endpoint and view a company's accounts. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The account's name. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The account's description. 
""" - classification: typing.Optional[AccountRequestClassification] = pydantic_v1.Field() + classification: typing.Optional[AccountRequestClassification] = pydantic.Field() """ The account's broadest grouping. @@ -52,12 +52,12 @@ class AccountRequest(pydantic_v1.BaseModel): - `REVENUE` - REVENUE """ - type: typing.Optional[str] = pydantic_v1.Field() + type: typing.Optional[str] = pydantic.Field() """ The account's type is a narrower and more specific grouping within the account's classification. """ - status: typing.Optional[AccountRequestStatus] = pydantic_v1.Field() + status: typing.Optional[AccountRequestStatus] = pydantic.Field() """ The account's status. @@ -66,12 +66,12 @@ class AccountRequest(pydantic_v1.BaseModel): - `INACTIVE` - INACTIVE """ - current_balance: typing.Optional[float] = pydantic_v1.Field() + current_balance: typing.Optional[float] = pydantic.Field() """ The account's current balance. """ - currency: typing.Optional[AccountRequestCurrency] = pydantic_v1.Field() + currency: typing.Optional[AccountRequestCurrency] = pydantic.Field() """ The account's currency. @@ -383,17 +383,17 @@ class AccountRequest(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - account_number: typing.Optional[str] = pydantic_v1.Field() + account_number: typing.Optional[str] = pydantic.Field() """ The account's number. """ - parent_account: typing.Optional[str] = pydantic_v1.Field() + parent_account: typing.Optional[str] = pydantic.Field() """ ID of the parent account. """ - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The company the account belongs to. """ @@ -401,20 +401,11 @@ class AccountRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/account_response.py b/src/merge/resources/accounting/types/account_response.py index 187c7db0..d683b74f 100644 --- a/src/merge/resources/accounting/types/account_response.py +++ b/src/merge/resources/accounting/types/account_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account import Account from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .warning_validation_problem import WarningValidationProblem -class AccountResponse(pydantic_v1.BaseModel): +class AccountResponse(UniversalBaseModel): model: Account warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/account_token.py b/src/merge/resources/accounting/types/account_token.py index 4794fea3..c280c7cb 100644 --- a/src/merge/resources/accounting/types/account_token.py +++ b/src/merge/resources/accounting/types/account_token.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_integration import AccountIntegration -class AccountToken(pydantic_v1.BaseModel): +class AccountToken(UniversalBaseModel): account_token: str integration: AccountIntegration - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/accounting_attachment.py b/src/merge/resources/accounting/types/accounting_attachment.py index b184c02c..972f2a8c 100644 --- a/src/merge/resources/accounting/types/accounting_attachment.py +++ b/src/merge/resources/accounting/types/accounting_attachment.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData -class AccountingAttachment(pydantic_v1.BaseModel): +class AccountingAttachment(UniversalBaseModel): """ # The Accounting Attachment Object @@ -22,37 +23,37 @@ class AccountingAttachment(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - file_name: typing.Optional[str] = pydantic_v1.Field() + file_name: typing.Optional[str] = pydantic.Field() """ The attachment's name. """ - file_url: typing.Optional[str] = pydantic_v1.Field() + file_url: typing.Optional[str] = pydantic.Field() """ The attachment's url. """ - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The company the accounting attachment belongs to. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -60,20 +61,11 @@ class AccountingAttachment(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/accounting_attachment_request.py b/src/merge/resources/accounting/types/accounting_attachment_request.py index fdd405d1..1e4185c6 100644 --- a/src/merge/resources/accounting/types/accounting_attachment_request.py +++ b/src/merge/resources/accounting/types/accounting_attachment_request.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class AccountingAttachmentRequest(pydantic_v1.BaseModel): + +class AccountingAttachmentRequest(UniversalBaseModel): """ # The Accounting Attachment Object @@ -20,17 +20,17 @@ class AccountingAttachmentRequest(pydantic_v1.BaseModel): Fetch from the `LIST AccountingAttachments` endpoint and view a company's attachments. """ - file_name: typing.Optional[str] = pydantic_v1.Field() + file_name: typing.Optional[str] = pydantic.Field() """ The attachment's name. """ - file_url: typing.Optional[str] = pydantic_v1.Field() + file_url: typing.Optional[str] = pydantic.Field() """ The attachment's url. """ - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The company the accounting attachment belongs to. 
""" @@ -38,20 +38,11 @@ class AccountingAttachmentRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/accounting_attachment_response.py b/src/merge/resources/accounting/types/accounting_attachment_response.py index b3863d76..ea05b7c5 100644 --- a/src/merge/resources/accounting/types/accounting_attachment_response.py +++ b/src/merge/resources/accounting/types/accounting_attachment_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .accounting_attachment import AccountingAttachment from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .warning_validation_problem import WarningValidationProblem -class AccountingAttachmentResponse(pydantic_v1.BaseModel): +class AccountingAttachmentResponse(UniversalBaseModel): model: AccountingAttachment warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/accounting_period.py b/src/merge/resources/accounting/types/accounting_period.py index 8a203e7e..9689029b 
100644 --- a/src/merge/resources/accounting/types/accounting_period.py +++ b/src/merge/resources/accounting/types/accounting_period.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .accounting_period_status import AccountingPeriodStatus -class AccountingPeriod(pydantic_v1.BaseModel): +class AccountingPeriod(UniversalBaseModel): """ # The AccountingPeriod Object @@ -22,46 +23,37 @@ class AccountingPeriod(pydantic_v1.BaseModel): """ id: typing.Optional[str] - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - start_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + start_date: typing.Optional[dt.datetime] = pydantic.Field() """ Beginning date of the period """ - end_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + end_date: typing.Optional[dt.datetime] = pydantic.Field() """ End date of the period """ status: typing.Optional[AccountingPeriodStatus] - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ Name of the accounting period. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/accounting_phone_number.py b/src/merge/resources/accounting/types/accounting_phone_number.py index 6a84d448..692f96c2 100644 --- a/src/merge/resources/accounting/types/accounting_phone_number.py +++ b/src/merge/resources/accounting/types/accounting_phone_number.py @@ -3,11 +3,12 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class AccountingPhoneNumber(pydantic_v1.BaseModel): + +class AccountingPhoneNumber(UniversalBaseModel): """ # The AccountingPhoneNumber Object @@ -20,40 +21,31 @@ class AccountingPhoneNumber(pydantic_v1.BaseModel): Fetch from the `GET CompanyInfo` endpoint and view the company's phone numbers. 
""" - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - number: typing.Optional[str] = pydantic_v1.Field() + number: typing.Optional[str] = pydantic.Field() """ The phone number. """ - type: typing.Optional[str] = pydantic_v1.Field() + type: typing.Optional[str] = pydantic.Field() """ The phone number's type. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/accounting_phone_number_request.py b/src/merge/resources/accounting/types/accounting_phone_number_request.py index 64fc4f71..59b3b9b5 100644 --- a/src/merge/resources/accounting/types/accounting_phone_number_request.py +++ b/src/merge/resources/accounting/types/accounting_phone_number_request.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class AccountingPhoneNumberRequest(pydantic_v1.BaseModel): + +class AccountingPhoneNumberRequest(UniversalBaseModel): """ # The AccountingPhoneNumber Object @@ -20,12 +20,12 @@ class AccountingPhoneNumberRequest(pydantic_v1.BaseModel): Fetch from the `GET CompanyInfo` endpoint and view the company's phone numbers. """ - number: typing.Optional[str] = pydantic_v1.Field() + number: typing.Optional[str] = pydantic.Field() """ The phone number. """ - type: typing.Optional[str] = pydantic_v1.Field() + type: typing.Optional[str] = pydantic.Field() """ The phone number's type. 
""" @@ -33,20 +33,11 @@ class AccountingPhoneNumberRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/address.py b/src/merge/resources/accounting/types/address.py index aae530c6..a9b0553c 100644 --- a/src/merge/resources/accounting/types/address.py +++ b/src/merge/resources/accounting/types/address.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .address_country import AddressCountry from .address_type import AddressType -class Address(pydantic_v1.BaseModel): +class Address(UniversalBaseModel): """ # The Address Object @@ -22,17 +23,17 @@ class Address(pydantic_v1.BaseModel): Fetch from the `GET CompanyInfo` endpoint and view the company's addresses. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - type: typing.Optional[AddressType] = pydantic_v1.Field() + type: typing.Optional[AddressType] = pydantic.Field() """ The address type. @@ -40,28 +41,28 @@ class Address(pydantic_v1.BaseModel): - `SHIPPING` - SHIPPING """ - street_1: typing.Optional[str] = pydantic_v1.Field() + street_1: typing.Optional[str] = pydantic.Field() """ Line 1 of the address's street. """ - street_2: typing.Optional[str] = pydantic_v1.Field() + street_2: typing.Optional[str] = pydantic.Field() """ Line 2 of the address's street. """ - city: typing.Optional[str] = pydantic_v1.Field() + city: typing.Optional[str] = pydantic.Field() """ The address's city. """ state: typing.Optional[typing.Any] - country_subdivision: typing.Optional[str] = pydantic_v1.Field() + country_subdivision: typing.Optional[str] = pydantic.Field() """ The address's state or region. """ - country: typing.Optional[AddressCountry] = pydantic_v1.Field() + country: typing.Optional[AddressCountry] = pydantic.Field() """ The address's country. 
@@ -316,25 +317,16 @@ class Address(pydantic_v1.BaseModel): - `ZW` - Zimbabwe """ - zip_code: typing.Optional[str] = pydantic_v1.Field() + zip_code: typing.Optional[str] = pydantic.Field() """ The address's zip code. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/address_request.py b/src/merge/resources/accounting/types/address_request.py index cc69afee..565831f2 100644 --- a/src/merge/resources/accounting/types/address_request.py +++ b/src/merge/resources/accounting/types/address_request.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .address_request_country import AddressRequestCountry from .address_request_type import AddressRequestType -class AddressRequest(pydantic_v1.BaseModel): +class AddressRequest(UniversalBaseModel): """ # The Address Object @@ -22,7 +22,7 @@ class AddressRequest(pydantic_v1.BaseModel): Fetch from the `GET CompanyInfo` endpoint and view the company's addresses. """ - type: typing.Optional[AddressRequestType] = pydantic_v1.Field() + type: typing.Optional[AddressRequestType] = pydantic.Field() """ The address type. @@ -30,27 +30,27 @@ class AddressRequest(pydantic_v1.BaseModel): - `SHIPPING` - SHIPPING """ - street_1: typing.Optional[str] = pydantic_v1.Field() + street_1: typing.Optional[str] = pydantic.Field() """ Line 1 of the address's street. """ - street_2: typing.Optional[str] = pydantic_v1.Field() + street_2: typing.Optional[str] = pydantic.Field() """ Line 2 of the address's street. """ - city: typing.Optional[str] = pydantic_v1.Field() + city: typing.Optional[str] = pydantic.Field() """ The address's city. """ - country_subdivision: typing.Optional[str] = pydantic_v1.Field() + country_subdivision: typing.Optional[str] = pydantic.Field() """ The address's state or region. """ - country: typing.Optional[AddressRequestCountry] = pydantic_v1.Field() + country: typing.Optional[AddressRequestCountry] = pydantic.Field() """ The address's country. @@ -305,7 +305,7 @@ class AddressRequest(pydantic_v1.BaseModel): - `ZW` - Zimbabwe """ - zip_code: typing.Optional[str] = pydantic_v1.Field() + zip_code: typing.Optional[str] = pydantic.Field() """ The address's zip code. 
""" @@ -313,20 +313,11 @@ class AddressRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/advanced_metadata.py b/src/merge/resources/accounting/types/advanced_metadata.py index 2aa7d1d6..5b0ec9e2 100644 --- a/src/merge/resources/accounting/types/advanced_metadata.py +++ b/src/merge/resources/accounting/types/advanced_metadata.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class AdvancedMetadata(pydantic_v1.BaseModel): + +class AdvancedMetadata(UniversalBaseModel): id: str display_name: typing.Optional[str] description: typing.Optional[str] @@ -15,20 +15,11 @@ class AdvancedMetadata(pydantic_v1.BaseModel): is_custom: typing.Optional[bool] field_choices: typing.Optional[typing.List[typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/async_passthrough_reciept.py b/src/merge/resources/accounting/types/async_passthrough_reciept.py index 2cc33210..f2144443 100644 --- a/src/merge/resources/accounting/types/async_passthrough_reciept.py +++ b/src/merge/resources/accounting/types/async_passthrough_reciept.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API 
Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class AsyncPassthroughReciept(pydantic_v1.BaseModel): - async_passthrough_receipt_id: str - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class AsyncPassthroughReciept(UniversalBaseModel): + async_passthrough_receipt_id: str - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/audit_log_event.py b/src/merge/resources/accounting/types/audit_log_event.py index 43be24a5..c0b4e43a 100644 --- a/src/merge/resources/accounting/types/audit_log_event.py +++ b/src/merge/resources/accounting/types/audit_log_event.py @@ -3,25 +3,26 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .audit_log_event_event_type import AuditLogEventEventType from .audit_log_event_role import AuditLogEventRole -class AuditLogEvent(pydantic_v1.BaseModel): +class AuditLogEvent(UniversalBaseModel): id: typing.Optional[str] - user_name: typing.Optional[str] = pydantic_v1.Field() + user_name: typing.Optional[str] = pydantic.Field() """ The User's full name at the time of this Event occurring. """ - user_email: typing.Optional[str] = pydantic_v1.Field() + user_email: typing.Optional[str] = pydantic.Field() """ The User's email at the time of this Event occurring. """ - role: AuditLogEventRole = pydantic_v1.Field() + role: AuditLogEventRole = pydantic.Field() """ Designates the role of the user (or SYSTEM/API if action not taken by a user) at the time of this Event occurring. @@ -34,7 +35,7 @@ class AuditLogEvent(pydantic_v1.BaseModel): """ ip_address: str - event_type: AuditLogEventEventType = pydantic_v1.Field() + event_type: AuditLogEventEventType = pydantic.Field() """ Designates the type of event that occurred. 
@@ -80,20 +81,11 @@ class AuditLogEvent(pydantic_v1.BaseModel): event_description: str created_at: typing.Optional[dt.datetime] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/available_actions.py b/src/merge/resources/accounting/types/available_actions.py index bbd94581..1f1d424c 100644 --- a/src/merge/resources/accounting/types/available_actions.py +++ b/src/merge/resources/accounting/types/available_actions.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_integration import AccountIntegration from .model_operation import ModelOperation -class AvailableActions(pydantic_v1.BaseModel): +class AvailableActions(UniversalBaseModel): """ # The AvailableActions Object @@ -26,20 +26,11 @@ class AvailableActions(pydantic_v1.BaseModel): passthrough_available: bool available_model_operations: typing.Optional[typing.List[ModelOperation]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/balance_sheet.py b/src/merge/resources/accounting/types/balance_sheet.py index bb0aeffd..629d46d3 100644 --- a/src/merge/resources/accounting/types/balance_sheet.py +++ b/src/merge/resources/accounting/types/balance_sheet.py @@ -3,15 +3,16 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from 
....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .balance_sheet_company import BalanceSheetCompany from .balance_sheet_currency import BalanceSheetCurrency from .remote_data import RemoteData from .report_item import ReportItem -class BalanceSheet(pydantic_v1.BaseModel): +class BalanceSheet(UniversalBaseModel): """ # The BalanceSheet Object @@ -25,27 +26,27 @@ class BalanceSheet(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The balance sheet's name. """ - currency: typing.Optional[BalanceSheetCurrency] = pydantic_v1.Field() + currency: typing.Optional[BalanceSheetCurrency] = pydantic.Field() """ The balance sheet's currency. @@ -357,17 +358,17 @@ class BalanceSheet(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - company: typing.Optional[BalanceSheetCompany] = pydantic_v1.Field() + company: typing.Optional[BalanceSheetCompany] = pydantic.Field() """ `Company` object for the given `BalanceSheet` object. """ - date: typing.Optional[dt.datetime] = pydantic_v1.Field() + date: typing.Optional[dt.datetime] = pydantic.Field() """ The balance sheet's date. The balance sheet data will reflect the company's financial position this point in time. """ - net_assets: typing.Optional[float] = pydantic_v1.Field() + net_assets: typing.Optional[float] = pydantic.Field() """ The balance sheet's net assets. """ @@ -375,12 +376,12 @@ class BalanceSheet(pydantic_v1.BaseModel): assets: typing.Optional[typing.List[ReportItem]] liabilities: typing.Optional[typing.List[ReportItem]] equity: typing.Optional[typing.List[ReportItem]] - remote_generated_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_generated_at: typing.Optional[dt.datetime] = pydantic.Field() """ The time that balance sheet was generated by the accounting system. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -388,20 +389,11 @@ class BalanceSheet(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/cash_flow_statement.py b/src/merge/resources/accounting/types/cash_flow_statement.py index b47bbfaf..5a6fb443 100644 --- a/src/merge/resources/accounting/types/cash_flow_statement.py +++ b/src/merge/resources/accounting/types/cash_flow_statement.py @@ -3,15 +3,16 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .cash_flow_statement_company import CashFlowStatementCompany from .cash_flow_statement_currency import CashFlowStatementCurrency from .remote_data import RemoteData from .report_item import ReportItem -class CashFlowStatement(pydantic_v1.BaseModel): +class CashFlowStatement(UniversalBaseModel): """ # The CashFlowStatement Object @@ -25,27 +26,27 @@ class CashFlowStatement(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The cash flow statement's name. """ - currency: typing.Optional[CashFlowStatementCurrency] = pydantic_v1.Field() + currency: typing.Optional[CashFlowStatementCurrency] = pydantic.Field() """ The cash flow statement's currency. @@ -357,27 +358,27 @@ class CashFlowStatement(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - company: typing.Optional[CashFlowStatementCompany] = pydantic_v1.Field() + company: typing.Optional[CashFlowStatementCompany] = pydantic.Field() """ The company the cash flow statement belongs to. """ - start_period: typing.Optional[dt.datetime] = pydantic_v1.Field() + start_period: typing.Optional[dt.datetime] = pydantic.Field() """ The cash flow statement's start period. 
""" - end_period: typing.Optional[dt.datetime] = pydantic_v1.Field() + end_period: typing.Optional[dt.datetime] = pydantic.Field() """ The cash flow statement's end period. """ - cash_at_beginning_of_period: typing.Optional[float] = pydantic_v1.Field() + cash_at_beginning_of_period: typing.Optional[float] = pydantic.Field() """ Cash and cash equivalents at the beginning of the cash flow statement's period. """ - cash_at_end_of_period: typing.Optional[float] = pydantic_v1.Field() + cash_at_end_of_period: typing.Optional[float] = pydantic.Field() """ Cash and cash equivalents at the beginning of the cash flow statement's period. """ @@ -385,12 +386,12 @@ class CashFlowStatement(pydantic_v1.BaseModel): operating_activities: typing.Optional[typing.List[ReportItem]] investing_activities: typing.Optional[typing.List[ReportItem]] financing_activities: typing.Optional[typing.List[ReportItem]] - remote_generated_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_generated_at: typing.Optional[dt.datetime] = pydantic.Field() """ The time that cash flow statement was generated by the accounting system. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -398,20 +399,11 @@ class CashFlowStatement(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/common_model_scope_api.py b/src/merge/resources/accounting/types/common_model_scope_api.py index d6fdec56..093001ba 100644 --- a/src/merge/resources/accounting/types/common_model_scope_api.py +++ b/src/merge/resources/accounting/types/common_model_scope_api.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .individual_common_model_scope_deserializer import IndividualCommonModelScopeDeserializer -class CommonModelScopeApi(pydantic_v1.BaseModel): - common_models: typing.List[IndividualCommonModelScopeDeserializer] = pydantic_v1.Field() +class CommonModelScopeApi(UniversalBaseModel): + common_models: typing.List[IndividualCommonModelScopeDeserializer] = pydantic.Field() """ The common models you want to update the scopes for """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/common_model_scopes_body_request.py b/src/merge/resources/accounting/types/common_model_scopes_body_request.py index e956cbdc..9098f031 100644 --- a/src/merge/resources/accounting/types/common_model_scopes_body_request.py +++ b/src/merge/resources/accounting/types/common_model_scopes_body_request.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .enabled_actions_enum import EnabledActionsEnum -class CommonModelScopesBodyRequest(pydantic_v1.BaseModel): +class CommonModelScopesBodyRequest(UniversalBaseModel): model_id: str enabled_actions: typing.List[EnabledActionsEnum] disabled_fields: typing.List[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/company_info.py b/src/merge/resources/accounting/types/company_info.py index 2eb8cd16..ce359d0f 100644 --- a/src/merge/resources/accounting/types/company_info.py +++ b/src/merge/resources/accounting/types/company_info.py @@ -3,15 +3,16 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .accounting_phone_number import AccountingPhoneNumber from .address import Address from .company_info_currency import CompanyInfoCurrency from .remote_data import RemoteData -class CompanyInfo(pydantic_v1.BaseModel): +class CompanyInfo(UniversalBaseModel): """ # The CompanyInfo Object @@ -25,47 +26,47 @@ class CompanyInfo(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The company's name. """ - legal_name: typing.Optional[str] = pydantic_v1.Field() + legal_name: typing.Optional[str] = pydantic.Field() """ The company's legal name. """ - tax_number: typing.Optional[str] = pydantic_v1.Field() + tax_number: typing.Optional[str] = pydantic.Field() """ The company's tax number. 
""" - fiscal_year_end_month: typing.Optional[int] = pydantic_v1.Field() + fiscal_year_end_month: typing.Optional[int] = pydantic.Field() """ The company's fiscal year end month. """ - fiscal_year_end_day: typing.Optional[int] = pydantic_v1.Field() + fiscal_year_end_day: typing.Optional[int] = pydantic.Field() """ The company's fiscal year end day. """ - currency: typing.Optional[CompanyInfoCurrency] = pydantic_v1.Field() + currency: typing.Optional[CompanyInfoCurrency] = pydantic.Field() """ The currency set in the company's accounting platform. @@ -377,19 +378,19 @@ class CompanyInfo(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's company was created. """ - urls: typing.Optional[typing.List[typing.Optional[str]]] = pydantic_v1.Field() + urls: typing.Optional[typing.List[typing.Optional[str]]] = pydantic.Field() """ The company's urls. """ addresses: typing.Optional[typing.List[Address]] phone_numbers: typing.Optional[typing.List[AccountingPhoneNumber]] - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -397,20 +398,11 @@ class CompanyInfo(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/contact.py b/src/merge/resources/accounting/types/contact.py index 0515a732..5832d4fb 100644 --- a/src/merge/resources/accounting/types/contact.py +++ b/src/merge/resources/accounting/types/contact.py @@ -3,15 +3,16 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .accounting_phone_number import AccountingPhoneNumber from .contact_addresses_item import ContactAddressesItem from .contact_status import ContactStatus from .remote_data import RemoteData -class Contact(pydantic_v1.BaseModel): +class Contact(UniversalBaseModel): """ # The Contact Object @@ -28,47 +29,47 @@ class Contact(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = 
pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The contact's name. """ - is_supplier: typing.Optional[bool] = pydantic_v1.Field() + is_supplier: typing.Optional[bool] = pydantic.Field() """ Whether the contact is a supplier. """ - is_customer: typing.Optional[bool] = pydantic_v1.Field() + is_customer: typing.Optional[bool] = pydantic.Field() """ Whether the contact is a customer. """ - email_address: typing.Optional[str] = pydantic_v1.Field() + email_address: typing.Optional[str] = pydantic.Field() """ The contact's email address. """ - tax_number: typing.Optional[str] = pydantic_v1.Field() + tax_number: typing.Optional[str] = pydantic.Field() """ The contact's tax number. """ - status: typing.Optional[ContactStatus] = pydantic_v1.Field() + status: typing.Optional[ContactStatus] = pydantic.Field() """ The contact's status @@ -76,32 +77,32 @@ class Contact(pydantic_v1.BaseModel): - `ARCHIVED` - ARCHIVED """ - currency: typing.Optional[str] = pydantic_v1.Field() + currency: typing.Optional[str] = pydantic.Field() """ The currency the contact's transactions are in. """ - remote_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_updated_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's contact was updated. """ - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The company the contact belongs to. """ - addresses: typing.Optional[typing.List[typing.Optional[ContactAddressesItem]]] = pydantic_v1.Field() + addresses: typing.Optional[typing.List[typing.Optional[ContactAddressesItem]]] = pydantic.Field() """ `Address` object IDs for the given `Contacts` object. """ - phone_numbers: typing.Optional[typing.List[AccountingPhoneNumber]] = pydantic_v1.Field() + phone_numbers: typing.Optional[typing.List[AccountingPhoneNumber]] = pydantic.Field() """ `AccountingPhoneNumber` object for the given `Contacts` object. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -109,20 +110,11 @@ class Contact(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/contact_request.py b/src/merge/resources/accounting/types/contact_request.py index 5248e5b3..31e7715b 100644 --- a/src/merge/resources/accounting/types/contact_request.py +++ b/src/merge/resources/accounting/types/contact_request.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .accounting_phone_number_request import AccountingPhoneNumberRequest from .contact_request_addresses_item import ContactRequestAddressesItem from .contact_request_status import ContactRequestStatus -class ContactRequest(pydantic_v1.BaseModel): +class ContactRequest(UniversalBaseModel): """ # The Contact Object @@ -26,32 +26,32 @@ class ContactRequest(pydantic_v1.BaseModel): Fetch from the `LIST Contacts` endpoint and view a company's contacts. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The contact's name. """ - is_supplier: typing.Optional[bool] = pydantic_v1.Field() + is_supplier: typing.Optional[bool] = pydantic.Field() """ Whether the contact is a supplier. """ - is_customer: typing.Optional[bool] = pydantic_v1.Field() + is_customer: typing.Optional[bool] = pydantic.Field() """ Whether the contact is a customer. """ - email_address: typing.Optional[str] = pydantic_v1.Field() + email_address: typing.Optional[str] = pydantic.Field() """ The contact's email address. """ - tax_number: typing.Optional[str] = pydantic_v1.Field() + tax_number: typing.Optional[str] = pydantic.Field() """ The contact's tax number. """ - status: typing.Optional[ContactRequestStatus] = pydantic_v1.Field() + status: typing.Optional[ContactRequestStatus] = pydantic.Field() """ The contact's status @@ -59,22 +59,22 @@ class ContactRequest(pydantic_v1.BaseModel): - `ARCHIVED` - ARCHIVED """ - currency: typing.Optional[str] = pydantic_v1.Field() + currency: typing.Optional[str] = pydantic.Field() """ The currency the contact's transactions are in. 
""" - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The company the contact belongs to. """ - addresses: typing.Optional[typing.List[typing.Optional[ContactRequestAddressesItem]]] = pydantic_v1.Field() + addresses: typing.Optional[typing.List[typing.Optional[ContactRequestAddressesItem]]] = pydantic.Field() """ `Address` object IDs for the given `Contacts` object. """ - phone_numbers: typing.Optional[typing.List[AccountingPhoneNumberRequest]] = pydantic_v1.Field() + phone_numbers: typing.Optional[typing.List[AccountingPhoneNumberRequest]] = pydantic.Field() """ `AccountingPhoneNumber` object for the given `Contacts` object. """ @@ -82,20 +82,11 @@ class ContactRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/contact_response.py b/src/merge/resources/accounting/types/contact_response.py index 86e9ae26..d5fa7bd8 100644 --- a/src/merge/resources/accounting/types/contact_response.py +++ b/src/merge/resources/accounting/types/contact_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .contact import Contact from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .warning_validation_problem import WarningValidationProblem -class ContactResponse(pydantic_v1.BaseModel): +class ContactResponse(UniversalBaseModel): model: Contact warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/credit_note.py b/src/merge/resources/accounting/types/credit_note.py index 63e68183..c8ffaae3 100644 --- a/src/merge/resources/accounting/types/credit_note.py +++ b/src/merge/resources/accounting/types/credit_note.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .credit_note_accounting_period import CreditNoteAccountingPeriod from .credit_note_applied_payments_item import CreditNoteAppliedPaymentsItem from .credit_note_company import CreditNoteCompany @@ -17,7 +18,7 @@ from .remote_data import RemoteData -class CreditNote(pydantic_v1.BaseModel): +class CreditNote(UniversalBaseModel): """ # The CreditNote Object @@ -31,27 +32,27 @@ class CreditNote(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - transaction_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + transaction_date: typing.Optional[dt.datetime] = pydantic.Field() """ The credit note's transaction date. 
""" - status: typing.Optional[CreditNoteStatus] = pydantic_v1.Field() + status: typing.Optional[CreditNoteStatus] = pydantic.Field() """ The credit note's status. @@ -60,39 +61,39 @@ class CreditNote(pydantic_v1.BaseModel): - `PAID` - PAID """ - number: typing.Optional[str] = pydantic_v1.Field() + number: typing.Optional[str] = pydantic.Field() """ The credit note's number. """ - contact: typing.Optional[CreditNoteContact] = pydantic_v1.Field() + contact: typing.Optional[CreditNoteContact] = pydantic.Field() """ The credit note's contact. """ - company: typing.Optional[CreditNoteCompany] = pydantic_v1.Field() + company: typing.Optional[CreditNoteCompany] = pydantic.Field() """ The company the credit note belongs to. """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The credit note's exchange rate. """ - total_amount: typing.Optional[float] = pydantic_v1.Field() + total_amount: typing.Optional[float] = pydantic.Field() """ The credit note's total amount. """ - remaining_credit: typing.Optional[float] = pydantic_v1.Field() + remaining_credit: typing.Optional[float] = pydantic.Field() """ The amount of value remaining in the credit note that the customer can use. """ line_items: typing.Optional[typing.List[CreditNoteLineItem]] tracking_categories: typing.Optional[typing.List[typing.Optional[CreditNoteTrackingCategoriesItem]]] - currency: typing.Optional[CreditNoteCurrency] = pydantic_v1.Field() + currency: typing.Optional[CreditNoteCurrency] = pydantic.Field() """ The credit note's currency. @@ -404,32 +405,32 @@ class CreditNote(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's credit note was created. """ - remote_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_updated_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's credit note was updated. """ - payments: typing.Optional[typing.List[typing.Optional[CreditNotePaymentsItem]]] = pydantic_v1.Field() + payments: typing.Optional[typing.List[typing.Optional[CreditNotePaymentsItem]]] = pydantic.Field() """ Array of `Payment` object IDs """ - applied_payments: typing.Optional[typing.List[typing.Optional[CreditNoteAppliedPaymentsItem]]] = pydantic_v1.Field() + applied_payments: typing.Optional[typing.List[typing.Optional[CreditNoteAppliedPaymentsItem]]] = pydantic.Field() """ A list of the Payment Applied to Lines common models related to a given Invoice, Credit Note, or Journal Entry. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ - accounting_period: typing.Optional[CreditNoteAccountingPeriod] = pydantic_v1.Field() + accounting_period: typing.Optional[CreditNoteAccountingPeriod] = pydantic.Field() """ The accounting period that the CreditNote was generated in. 
""" @@ -437,20 +438,11 @@ class CreditNote(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/credit_note_line_item.py b/src/merge/resources/accounting/types/credit_note_line_item.py index 3f108ff3..dd951d55 100644 --- a/src/merge/resources/accounting/types/credit_note_line_item.py +++ b/src/merge/resources/accounting/types/credit_note_line_item.py @@ -3,99 +3,91 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .credit_note_line_item_company import CreditNoteLineItemCompany from .credit_note_line_item_item import CreditNoteLineItemItem -class CreditNoteLineItem(pydantic_v1.BaseModel): +class CreditNoteLineItem(UniversalBaseModel): id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ item: typing.Optional[CreditNoteLineItemItem] - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The credit note line item's name. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The description of the item that is owed. """ - quantity: typing.Optional[str] = pydantic_v1.Field() + quantity: typing.Optional[str] = pydantic.Field() """ The credit note line item's quantity. """ - memo: typing.Optional[str] = pydantic_v1.Field() + memo: typing.Optional[str] = pydantic.Field() """ The credit note line item's memo. """ - unit_price: typing.Optional[str] = pydantic_v1.Field() + unit_price: typing.Optional[str] = pydantic.Field() """ The credit note line item's unit price. """ - total_line_amount: typing.Optional[str] = pydantic_v1.Field() + total_line_amount: typing.Optional[str] = pydantic.Field() """ The credit note line item's total. 
""" - tracking_category: typing.Optional[str] = pydantic_v1.Field() + tracking_category: typing.Optional[str] = pydantic.Field() """ The credit note line item's associated tracking category. """ - tracking_categories: typing.Optional[typing.List[typing.Optional[str]]] = pydantic_v1.Field() + tracking_categories: typing.Optional[typing.List[typing.Optional[str]]] = pydantic.Field() """ The credit note line item's associated tracking categories. """ - account: typing.Optional[str] = pydantic_v1.Field() + account: typing.Optional[str] = pydantic.Field() """ The credit note line item's account. """ - company: typing.Optional[CreditNoteLineItemCompany] = pydantic_v1.Field() + company: typing.Optional[CreditNoteLineItemCompany] = pydantic.Field() """ The company the credit note belongs to. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/data_passthrough_request.py b/src/merge/resources/accounting/types/data_passthrough_request.py index fdf9b18c..3f9ee090 100644 --- a/src/merge/resources/accounting/types/data_passthrough_request.py +++ b/src/merge/resources/accounting/types/data_passthrough_request.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .method_enum import MethodEnum from .multipart_form_field_request import MultipartFormFieldRequest from .request_format_enum import RequestFormatEnum -class DataPassthroughRequest(pydantic_v1.BaseModel): +class DataPassthroughRequest(UniversalBaseModel): """ # The DataPassthrough Object @@ -24,51 +24,42 @@ class DataPassthroughRequest(pydantic_v1.BaseModel): """ method: MethodEnum - path: str = pydantic_v1.Field() + path: str = pydantic.Field() """ The path of the request in the third party's platform. """ - base_url_override: typing.Optional[str] = pydantic_v1.Field() + base_url_override: typing.Optional[str] = pydantic.Field() """ An optional override of the third party's base url for the request. """ - data: typing.Optional[str] = pydantic_v1.Field() + data: typing.Optional[str] = pydantic.Field() """ The data with the request. 
You must include a `request_format` parameter matching the data's format """ - multipart_form_data: typing.Optional[typing.List[MultipartFormFieldRequest]] = pydantic_v1.Field() + multipart_form_data: typing.Optional[typing.List[MultipartFormFieldRequest]] = pydantic.Field() """ Pass an array of `MultipartFormField` objects in here instead of using the `data` param if `request_format` is set to `MULTIPART`. """ - headers: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + headers: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ The headers to use for the request (Merge will handle the account's authorization headers). `Content-Type` header is required for passthrough. Choose content type corresponding to expected format of receiving server. """ request_format: typing.Optional[RequestFormatEnum] - normalize_response: typing.Optional[bool] = pydantic_v1.Field() + normalize_response: typing.Optional[bool] = pydantic.Field() """ Optional. If true, the response will always be an object of the form `{"type": T, "value": ...}` where `T` will be one of `string, boolean, number, null, array, object`. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/debug_mode_log.py b/src/merge/resources/accounting/types/debug_mode_log.py index 321c9090..8edea2ae 100644 --- a/src/merge/resources/accounting/types/debug_mode_log.py +++ b/src/merge/resources/accounting/types/debug_mode_log.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_model_log_summary import DebugModelLogSummary -class DebugModeLog(pydantic_v1.BaseModel): +class DebugModeLog(UniversalBaseModel): log_id: str dashboard_view: str log_summary: DebugModelLogSummary - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/debug_model_log_summary.py b/src/merge/resources/accounting/types/debug_model_log_summary.py index 06bb154b..4f5b07db 100644 --- a/src/merge/resources/accounting/types/debug_model_log_summary.py +++ b/src/merge/resources/accounting/types/debug_model_log_summary.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class DebugModelLogSummary(pydantic_v1.BaseModel): + +class DebugModelLogSummary(UniversalBaseModel): url: str method: str status_code: int - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/error_validation_problem.py b/src/merge/resources/accounting/types/error_validation_problem.py index 425af45c..3838491d 100644 --- a/src/merge/resources/accounting/types/error_validation_problem.py +++ b/src/merge/resources/accounting/types/error_validation_problem.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .validation_problem_source import ValidationProblemSource -class ErrorValidationProblem(pydantic_v1.BaseModel): +class ErrorValidationProblem(UniversalBaseModel): source: typing.Optional[ValidationProblemSource] title: str detail: str problem_type: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/expense.py b/src/merge/resources/accounting/types/expense.py index 3a76e358..394fceba 100644 --- a/src/merge/resources/accounting/types/expense.py +++ b/src/merge/resources/accounting/types/expense.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .expense_account import ExpenseAccount from .expense_accounting_period import ExpenseAccountingPeriod from .expense_company import ExpenseCompany @@ -15,7 +16,7 @@ from .remote_data import RemoteData -class Expense(pydantic_v1.BaseModel): +class Expense(UniversalBaseModel): """ # The Expense Object @@ -31,57 +32,57 @@ class Expense(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - transaction_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + transaction_date: typing.Optional[dt.datetime] = pydantic.Field() """ When the transaction occurred. """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the expense was created. """ - account: typing.Optional[ExpenseAccount] = pydantic_v1.Field() + account: typing.Optional[ExpenseAccount] = pydantic.Field() """ The expense's payment account. 
""" - contact: typing.Optional[ExpenseContact] = pydantic_v1.Field() + contact: typing.Optional[ExpenseContact] = pydantic.Field() """ The expense's contact. """ - total_amount: typing.Optional[float] = pydantic_v1.Field() + total_amount: typing.Optional[float] = pydantic.Field() """ The expense's total amount. """ - sub_total: typing.Optional[float] = pydantic_v1.Field() + sub_total: typing.Optional[float] = pydantic.Field() """ The expense's total amount before tax. """ - total_tax_amount: typing.Optional[float] = pydantic_v1.Field() + total_tax_amount: typing.Optional[float] = pydantic.Field() """ The expense's total tax amount. """ - currency: typing.Optional[ExpenseCurrency] = pydantic_v1.Field() + currency: typing.Optional[ExpenseCurrency] = pydantic.Field() """ The expense's currency. @@ -393,29 +394,29 @@ class Expense(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The expense's exchange rate. """ - company: typing.Optional[ExpenseCompany] = pydantic_v1.Field() + company: typing.Optional[ExpenseCompany] = pydantic.Field() """ The company the expense belongs to. """ - memo: typing.Optional[str] = pydantic_v1.Field() + memo: typing.Optional[str] = pydantic.Field() """ The expense's private note. """ lines: typing.Optional[typing.List[ExpenseLine]] tracking_categories: typing.Optional[typing.List[typing.Optional[ExpenseTrackingCategoriesItem]]] - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ - accounting_period: typing.Optional[ExpenseAccountingPeriod] = pydantic_v1.Field() + accounting_period: typing.Optional[ExpenseAccountingPeriod] = pydantic.Field() """ The accounting period that the Expense was generated in. 
""" @@ -423,20 +424,11 @@ class Expense(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/expense_line.py b/src/merge/resources/accounting/types/expense_line.py index d259a86a..1d7a9374 100644 --- a/src/merge/resources/accounting/types/expense_line.py +++ b/src/merge/resources/accounting/types/expense_line.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .expense_line_account import ExpenseLineAccount from .expense_line_contact import ExpenseLineContact from .expense_line_currency import ExpenseLineCurrency @@ -13,7 +14,7 @@ from .expense_line_tracking_category import ExpenseLineTrackingCategory -class ExpenseLine(pydantic_v1.BaseModel): +class ExpenseLine(UniversalBaseModel): """ # The ExpenseLine Object @@ -27,39 +28,39 @@ class ExpenseLine(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - item: typing.Optional[ExpenseLineItem] = pydantic_v1.Field() + item: typing.Optional[ExpenseLineItem] = pydantic.Field() """ The line's item. """ - net_amount: typing.Optional[float] = pydantic_v1.Field() + net_amount: typing.Optional[float] = pydantic.Field() """ The line's net amount. """ tracking_category: typing.Optional[ExpenseLineTrackingCategory] tracking_categories: typing.Optional[typing.List[typing.Optional[ExpenseLineTrackingCategoriesItem]]] - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The company the line belongs to. """ - currency: typing.Optional[ExpenseLineCurrency] = pydantic_v1.Field() + currency: typing.Optional[ExpenseLineCurrency] = pydantic.Field() """ The expense line item's currency. 
@@ -371,45 +372,36 @@ class ExpenseLine(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - account: typing.Optional[ExpenseLineAccount] = pydantic_v1.Field() + account: typing.Optional[ExpenseLineAccount] = pydantic.Field() """ The expense's payment account. """ - contact: typing.Optional[ExpenseLineContact] = pydantic_v1.Field() + contact: typing.Optional[ExpenseLineContact] = pydantic.Field() """ The expense's contact. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The description of the item that was purchased by the company. """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The expense line item's exchange rate. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/expense_line_request.py b/src/merge/resources/accounting/types/expense_line_request.py index e329103b..e9d00f3f 100644 --- a/src/merge/resources/accounting/types/expense_line_request.py +++ b/src/merge/resources/accounting/types/expense_line_request.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .expense_line_request_account import ExpenseLineRequestAccount from .expense_line_request_contact import ExpenseLineRequestContact from .expense_line_request_currency import ExpenseLineRequestCurrency @@ -13,7 +13,7 @@ from .expense_line_request_tracking_category import ExpenseLineRequestTrackingCategory -class ExpenseLineRequest(pydantic_v1.BaseModel): +class ExpenseLineRequest(UniversalBaseModel): """ # The ExpenseLine Object @@ -26,29 +26,29 @@ class ExpenseLineRequest(pydantic_v1.BaseModel): Fetch from the `GET Expense` endpoint and view the expense's line items. """ - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. 
""" - item: typing.Optional[ExpenseLineRequestItem] = pydantic_v1.Field() + item: typing.Optional[ExpenseLineRequestItem] = pydantic.Field() """ The line's item. """ - net_amount: typing.Optional[float] = pydantic_v1.Field() + net_amount: typing.Optional[float] = pydantic.Field() """ The line's net amount. """ tracking_category: typing.Optional[ExpenseLineRequestTrackingCategory] tracking_categories: typing.Optional[typing.List[typing.Optional[ExpenseLineRequestTrackingCategoriesItem]]] - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The company the line belongs to. """ - currency: typing.Optional[ExpenseLineRequestCurrency] = pydantic_v1.Field() + currency: typing.Optional[ExpenseLineRequestCurrency] = pydantic.Field() """ The expense line item's currency. @@ -360,22 +360,22 @@ class ExpenseLineRequest(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - account: typing.Optional[ExpenseLineRequestAccount] = pydantic_v1.Field() + account: typing.Optional[ExpenseLineRequestAccount] = pydantic.Field() """ The expense's payment account. """ - contact: typing.Optional[ExpenseLineRequestContact] = pydantic_v1.Field() + contact: typing.Optional[ExpenseLineRequestContact] = pydantic.Field() """ The expense's contact. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The description of the item that was purchased by the company. """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The expense line item's exchange rate. """ @@ -383,20 +383,11 @@ class ExpenseLineRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/expense_request.py b/src/merge/resources/accounting/types/expense_request.py index 3f71ea04..3b0772e4 100644 --- a/src/merge/resources/accounting/types/expense_request.py +++ b/src/merge/resources/accounting/types/expense_request.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .expense_line_request import ExpenseLineRequest from .expense_request_account import ExpenseRequestAccount from 
.expense_request_accounting_period import ExpenseRequestAccountingPeriod @@ -14,7 +15,7 @@ from .expense_request_tracking_categories_item import ExpenseRequestTrackingCategoriesItem -class ExpenseRequest(pydantic_v1.BaseModel): +class ExpenseRequest(UniversalBaseModel): """ # The Expense Object @@ -29,37 +30,37 @@ class ExpenseRequest(pydantic_v1.BaseModel): Fetch from the `GET Expense` endpoint and view a company's expense. """ - transaction_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + transaction_date: typing.Optional[dt.datetime] = pydantic.Field() """ When the transaction occurred. """ - account: typing.Optional[ExpenseRequestAccount] = pydantic_v1.Field() + account: typing.Optional[ExpenseRequestAccount] = pydantic.Field() """ The expense's payment account. """ - contact: typing.Optional[ExpenseRequestContact] = pydantic_v1.Field() + contact: typing.Optional[ExpenseRequestContact] = pydantic.Field() """ The expense's contact. """ - total_amount: typing.Optional[float] = pydantic_v1.Field() + total_amount: typing.Optional[float] = pydantic.Field() """ The expense's total amount. """ - sub_total: typing.Optional[float] = pydantic_v1.Field() + sub_total: typing.Optional[float] = pydantic.Field() """ The expense's total amount before tax. """ - total_tax_amount: typing.Optional[float] = pydantic_v1.Field() + total_tax_amount: typing.Optional[float] = pydantic.Field() """ The expense's total tax amount. """ - currency: typing.Optional[ExpenseRequestCurrency] = pydantic_v1.Field() + currency: typing.Optional[ExpenseRequestCurrency] = pydantic.Field() """ The expense's currency. @@ -371,24 +372,24 @@ class ExpenseRequest(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The expense's exchange rate. """ - company: typing.Optional[ExpenseRequestCompany] = pydantic_v1.Field() + company: typing.Optional[ExpenseRequestCompany] = pydantic.Field() """ The company the expense belongs to. """ - memo: typing.Optional[str] = pydantic_v1.Field() + memo: typing.Optional[str] = pydantic.Field() """ The expense's private note. """ lines: typing.Optional[typing.List[ExpenseLineRequest]] tracking_categories: typing.Optional[typing.List[typing.Optional[ExpenseRequestTrackingCategoriesItem]]] - accounting_period: typing.Optional[ExpenseRequestAccountingPeriod] = pydantic_v1.Field() + accounting_period: typing.Optional[ExpenseRequestAccountingPeriod] = pydantic.Field() """ The accounting period that the Expense was generated in. 
""" @@ -396,20 +397,11 @@ class ExpenseRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/expense_response.py b/src/merge/resources/accounting/types/expense_response.py index cb2549df..0ed502fa 100644 --- a/src/merge/resources/accounting/types/expense_response.py +++ b/src/merge/resources/accounting/types/expense_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .expense import Expense from .warning_validation_problem import WarningValidationProblem -class ExpenseResponse(pydantic_v1.BaseModel): +class ExpenseResponse(UniversalBaseModel): model: Expense warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/external_target_field_api.py b/src/merge/resources/accounting/types/external_target_field_api.py index a97d536a..8a971c64 100644 --- a/src/merge/resources/accounting/types/external_target_field_api.py +++ 
b/src/merge/resources/accounting/types/external_target_field_api.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ExternalTargetFieldApi(pydantic_v1.BaseModel): + +class ExternalTargetFieldApi(UniversalBaseModel): name: typing.Optional[str] description: typing.Optional[str] is_mapped: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/external_target_field_api_response.py b/src/merge/resources/accounting/types/external_target_field_api_response.py index 610d9f29..9f00ba82 100644 --- a/src/merge/resources/accounting/types/external_target_field_api_response.py +++ b/src/merge/resources/accounting/types/external_target_field_api_response.py @@ -1,58 +1,45 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .external_target_field_api import ExternalTargetFieldApi -class ExternalTargetFieldApiResponse(pydantic_v1.BaseModel): - account: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Account") - accounting_attachment: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field( +class ExternalTargetFieldApiResponse(UniversalBaseModel): + account: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Account") + accounting_attachment: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field( alias="AccountingAttachment" ) - balance_sheet: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="BalanceSheet") - cash_flow_statement: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field( + balance_sheet: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="BalanceSheet") + cash_flow_statement: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field( alias="CashFlowStatement" ) - company_info: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="CompanyInfo") - contact: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Contact") - income_statement: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="IncomeStatement") - credit_note: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="CreditNote") - item: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Item") - purchase_order: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="PurchaseOrder") - tracking_category: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field( - alias="TrackingCategory" - ) - journal_entry: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="JournalEntry") - tax_rate: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="TaxRate") - invoice: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Invoice") - payment: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Payment") - expense: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Expense") - vendor_credit: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="VendorCredit") - transaction: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Transaction") - general_ledger_transaction: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field( + company_info: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="CompanyInfo") + contact: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Contact") + income_statement: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="IncomeStatement") + credit_note: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="CreditNote") + item: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Item") + purchase_order: typing.Optional[typing.List[ExternalTargetFieldApi]] = 
pydantic.Field(alias="PurchaseOrder") + tracking_category: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="TrackingCategory") + journal_entry: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="JournalEntry") + tax_rate: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="TaxRate") + invoice: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Invoice") + payment: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Payment") + expense: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Expense") + vendor_credit: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="VendorCredit") + transaction: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Transaction") + general_ledger_transaction: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field( alias="GeneralLedgerTransaction" ) - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/field_mapping_api_instance.py b/src/merge/resources/accounting/types/field_mapping_api_instance.py index d9d7670d..8af85a52 100644 --- a/src/merge/resources/accounting/types/field_mapping_api_instance.py +++ b/src/merge/resources/accounting/types/field_mapping_api_instance.py @@ -1,34 +1,25 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_mapping_api_instance_remote_field import FieldMappingApiInstanceRemoteField from .field_mapping_api_instance_target_field import FieldMappingApiInstanceTargetField -class FieldMappingApiInstance(pydantic_v1.BaseModel): +class FieldMappingApiInstance(UniversalBaseModel): id: typing.Optional[str] is_integration_wide: typing.Optional[bool] target_field: typing.Optional[FieldMappingApiInstanceTargetField] remote_field: typing.Optional[FieldMappingApiInstanceRemoteField] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/field_mapping_api_instance_remote_field.py b/src/merge/resources/accounting/types/field_mapping_api_instance_remote_field.py index 9539d3dc..0635b76f 100644 --- a/src/merge/resources/accounting/types/field_mapping_api_instance_remote_field.py +++ b/src/merge/resources/accounting/types/field_mapping_api_instance_remote_field.py @@ -1,36 +1,25 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_mapping_api_instance_remote_field_remote_endpoint_info import ( FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo, ) -class FieldMappingApiInstanceRemoteField(pydantic_v1.BaseModel): +class FieldMappingApiInstanceRemoteField(UniversalBaseModel): remote_key_name: str - schema_: typing.Dict[str, typing.Any] = pydantic_v1.Field(alias="schema") + schema_: typing.Dict[str, typing.Any] = pydantic.Field(alias="schema") remote_endpoint_info: FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py b/src/merge/resources/accounting/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py index d9fcc276..e34eb6e4 100644 --- a/src/merge/resources/accounting/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py +++ b/src/merge/resources/accounting/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo(pydantic_v1.BaseModel): + +class FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo(UniversalBaseModel): method: typing.Optional[str] url_path: typing.Optional[str] field_traversal_path: typing.Optional[typing.List[str]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/field_mapping_api_instance_response.py b/src/merge/resources/accounting/types/field_mapping_api_instance_response.py index d2de12fb..818585d2 100644 --- a/src/merge/resources/accounting/types/field_mapping_api_instance_response.py +++ b/src/merge/resources/accounting/types/field_mapping_api_instance_response.py @@ -1,58 +1,45 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_mapping_api_instance import FieldMappingApiInstance -class FieldMappingApiInstanceResponse(pydantic_v1.BaseModel): - account: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Account") - accounting_attachment: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field( +class FieldMappingApiInstanceResponse(UniversalBaseModel): + account: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Account") + accounting_attachment: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field( alias="AccountingAttachment" ) - balance_sheet: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="BalanceSheet") - cash_flow_statement: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field( + balance_sheet: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="BalanceSheet") + cash_flow_statement: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field( alias="CashFlowStatement" ) - company_info: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="CompanyInfo") - contact: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Contact") - income_statement: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="IncomeStatement") - credit_note: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="CreditNote") - item: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Item") - purchase_order: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="PurchaseOrder") - tracking_category: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field( - alias="TrackingCategory" - ) - journal_entry: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="JournalEntry") - tax_rate: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="TaxRate") - invoice: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Invoice") - payment: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Payment") - expense: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Expense") - vendor_credit: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="VendorCredit") - transaction: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Transaction") - general_ledger_transaction: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field( + company_info: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="CompanyInfo") + contact: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Contact") + income_statement: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="IncomeStatement") + credit_note: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="CreditNote") + item: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Item") + purchase_order: typing.Optional[typing.List[FieldMappingApiInstance]] = 
pydantic.Field(alias="PurchaseOrder") + tracking_category: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="TrackingCategory") + journal_entry: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="JournalEntry") + tax_rate: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="TaxRate") + invoice: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Invoice") + payment: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Payment") + expense: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Expense") + vendor_credit: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="VendorCredit") + transaction: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Transaction") + general_ledger_transaction: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field( alias="GeneralLedgerTransaction" ) - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/field_mapping_api_instance_target_field.py b/src/merge/resources/accounting/types/field_mapping_api_instance_target_field.py index 25a8dcff..c590d4ce 100644 --- a/src/merge/resources/accounting/types/field_mapping_api_instance_target_field.py +++ b/src/merge/resources/accounting/types/field_mapping_api_instance_target_field.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldMappingApiInstanceTargetField(pydantic_v1.BaseModel): + +class FieldMappingApiInstanceTargetField(UniversalBaseModel): name: str description: str is_organization_wide: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/field_mapping_instance_response.py b/src/merge/resources/accounting/types/field_mapping_instance_response.py index b55d2c40..aaf06f0e 100644 --- a/src/merge/resources/accounting/types/field_mapping_instance_response.py +++ b/src/merge/resources/accounting/types/field_mapping_instance_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .field_mapping_api_instance import FieldMappingApiInstance from .warning_validation_problem import WarningValidationProblem -class FieldMappingInstanceResponse(pydantic_v1.BaseModel): +class FieldMappingInstanceResponse(UniversalBaseModel): model: FieldMappingApiInstance warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/field_permission_deserializer.py b/src/merge/resources/accounting/types/field_permission_deserializer.py index 124f3deb..ed80b9d6 100644 --- a/src/merge/resources/accounting/types/field_permission_deserializer.py +++ b/src/merge/resources/accounting/types/field_permission_deserializer.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldPermissionDeserializer(pydantic_v1.BaseModel): + +class FieldPermissionDeserializer(UniversalBaseModel): enabled: typing.Optional[typing.List[typing.Any]] disabled: typing.Optional[typing.List[typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/field_permission_deserializer_request.py b/src/merge/resources/accounting/types/field_permission_deserializer_request.py index 65e80e75..e937e743 100644 --- a/src/merge/resources/accounting/types/field_permission_deserializer_request.py +++ b/src/merge/resources/accounting/types/field_permission_deserializer_request.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldPermissionDeserializerRequest(pydantic_v1.BaseModel): + +class FieldPermissionDeserializerRequest(UniversalBaseModel): enabled: typing.Optional[typing.List[typing.Any]] disabled: typing.Optional[typing.List[typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/income_statement.py b/src/merge/resources/accounting/types/income_statement.py index 08450396..491511ea 100644 --- a/src/merge/resources/accounting/types/income_statement.py +++ b/src/merge/resources/accounting/types/income_statement.py @@ -3,15 +3,16 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .income_statement_company import IncomeStatementCompany from .income_statement_currency import IncomeStatementCurrency from .remote_data import RemoteData from .report_item import ReportItem -class IncomeStatement(pydantic_v1.BaseModel): +class IncomeStatement(UniversalBaseModel): """ # The IncomeStatement Object @@ -25,27 +26,27 @@ class IncomeStatement(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The income statement's name. """ - currency: typing.Optional[IncomeStatementCurrency] = pydantic_v1.Field() + currency: typing.Optional[IncomeStatementCurrency] = pydantic.Field() """ The income statement's currency. 
@@ -357,41 +358,41 @@ class IncomeStatement(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - company: typing.Optional[IncomeStatementCompany] = pydantic_v1.Field() + company: typing.Optional[IncomeStatementCompany] = pydantic.Field() """ The company the income statement belongs to. """ - start_period: typing.Optional[dt.datetime] = pydantic_v1.Field() + start_period: typing.Optional[dt.datetime] = pydantic.Field() """ The income statement's start period. """ - end_period: typing.Optional[dt.datetime] = pydantic_v1.Field() + end_period: typing.Optional[dt.datetime] = pydantic.Field() """ The income statement's end period. """ income: typing.Optional[typing.List[ReportItem]] cost_of_sales: typing.Optional[typing.List[ReportItem]] - gross_profit: typing.Optional[float] = pydantic_v1.Field() + gross_profit: typing.Optional[float] = pydantic.Field() """ The revenue minus the cost of sale. """ operating_expenses: typing.Optional[typing.List[ReportItem]] - net_operating_income: typing.Optional[float] = pydantic_v1.Field() + net_operating_income: typing.Optional[float] = pydantic.Field() """ The revenue minus the operating expenses. """ non_operating_expenses: typing.Optional[typing.List[ReportItem]] - net_income: typing.Optional[float] = pydantic_v1.Field() + net_income: typing.Optional[float] = pydantic.Field() """ The gross profit minus the total expenses. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -399,20 +400,11 @@ class IncomeStatement(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/individual_common_model_scope_deserializer.py b/src/merge/resources/accounting/types/individual_common_model_scope_deserializer.py index d80ca06e..ffa55055 100644 --- a/src/merge/resources/accounting/types/individual_common_model_scope_deserializer.py +++ b/src/merge/resources/accounting/types/individual_common_model_scope_deserializer.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_permission_deserializer import FieldPermissionDeserializer from .model_permission_deserializer import ModelPermissionDeserializer -class IndividualCommonModelScopeDeserializer(pydantic_v1.BaseModel): +class IndividualCommonModelScopeDeserializer(UniversalBaseModel): model_name: str model_permissions: typing.Optional[typing.Dict[str, ModelPermissionDeserializer]] field_permissions: typing.Optional[FieldPermissionDeserializer] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/individual_common_model_scope_deserializer_request.py b/src/merge/resources/accounting/types/individual_common_model_scope_deserializer_request.py index 8f2e7de5..d0e68f6d 100644 --- a/src/merge/resources/accounting/types/individual_common_model_scope_deserializer_request.py +++ b/src/merge/resources/accounting/types/individual_common_model_scope_deserializer_request.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
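# Both config branches keep the regenerated models frozen and tolerant of unknown keys, so
# payloads containing fields added to the API later still parse. Hypothetical usage against
# one of the small models above (the import path is assumed from this patch's file layout):
from merge.resources.accounting.types.field_permission_deserializer import FieldPermissionDeserializer

perms = FieldPermissionDeserializer(
    enabled=["remote_id"],
    disabled=[],
    some_field_added_later=True,  # extra="allow": unknown keys are kept, not rejected
)

try:
    perms.enabled = []  # frozen=True: assignment fails under Pydantic v1 and v2 alike
except Exception as exc:  # v1 raises TypeError, v2 raises a ValidationError
    print(f"immutable as expected: {exc!r}")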
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_permission_deserializer_request import FieldPermissionDeserializerRequest from .model_permission_deserializer_request import ModelPermissionDeserializerRequest -class IndividualCommonModelScopeDeserializerRequest(pydantic_v1.BaseModel): +class IndividualCommonModelScopeDeserializerRequest(UniversalBaseModel): model_name: str model_permissions: typing.Optional[typing.Dict[str, ModelPermissionDeserializerRequest]] field_permissions: typing.Optional[FieldPermissionDeserializerRequest] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/invoice.py b/src/merge/resources/accounting/types/invoice.py index bef59576..0f5f04e4 100644 --- a/src/merge/resources/accounting/types/invoice.py +++ b/src/merge/resources/accounting/types/invoice.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .invoice_accounting_period import InvoiceAccountingPeriod from .invoice_applied_payments_item import InvoiceAppliedPaymentsItem from .invoice_company import InvoiceCompany @@ -19,7 +20,7 @@ from .remote_data import RemoteData -class Invoice(pydantic_v1.BaseModel): +class Invoice(UniversalBaseModel): """ # The Invoice Object @@ -33,22 +34,22 @@ class Invoice(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - type: typing.Optional[InvoiceType] = pydantic_v1.Field() + type: typing.Optional[InvoiceType] = pydantic.Field() """ Whether the invoice is an accounts receivable or accounts payable. If `type` is `ACCOUNTS_PAYABLE`, the invoice is a bill. If `type` is `ACCOUNTS_RECEIVABLE`, it is an invoice. 
@@ -56,42 +57,42 @@ class Invoice(pydantic_v1.BaseModel): - `ACCOUNTS_PAYABLE` - ACCOUNTS_PAYABLE """ - contact: typing.Optional[InvoiceContact] = pydantic_v1.Field() + contact: typing.Optional[InvoiceContact] = pydantic.Field() """ The invoice's contact. """ - number: typing.Optional[str] = pydantic_v1.Field() + number: typing.Optional[str] = pydantic.Field() """ The invoice's number. """ - issue_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + issue_date: typing.Optional[dt.datetime] = pydantic.Field() """ The invoice's issue date. """ - due_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + due_date: typing.Optional[dt.datetime] = pydantic.Field() """ The invoice's due date. """ - paid_on_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + paid_on_date: typing.Optional[dt.datetime] = pydantic.Field() """ The invoice's paid date. """ - memo: typing.Optional[str] = pydantic_v1.Field() + memo: typing.Optional[str] = pydantic.Field() """ The invoice's private note. """ - company: typing.Optional[InvoiceCompany] = pydantic_v1.Field() + company: typing.Optional[InvoiceCompany] = pydantic.Field() """ The company the invoice belongs to. """ - currency: typing.Optional[InvoiceCurrency] = pydantic_v1.Field() + currency: typing.Optional[InvoiceCurrency] = pydantic.Field() """ The invoice's currency. @@ -403,22 +404,22 @@ class Invoice(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The invoice's exchange rate. """ - total_discount: typing.Optional[float] = pydantic_v1.Field() + total_discount: typing.Optional[float] = pydantic.Field() """ The total discounts applied to the total cost. """ - sub_total: typing.Optional[float] = pydantic_v1.Field() + sub_total: typing.Optional[float] = pydantic.Field() """ The total amount being paid before taxes. """ - status: typing.Optional[InvoiceStatus] = pydantic_v1.Field() + status: typing.Optional[InvoiceStatus] = pydantic.Field() """ The status of the invoice. @@ -430,40 +431,40 @@ class Invoice(pydantic_v1.BaseModel): - `VOID` - VOID """ - total_tax_amount: typing.Optional[float] = pydantic_v1.Field() + total_tax_amount: typing.Optional[float] = pydantic.Field() """ The total amount being paid in taxes. """ - total_amount: typing.Optional[float] = pydantic_v1.Field() + total_amount: typing.Optional[float] = pydantic.Field() """ The invoice's total amount. """ - balance: typing.Optional[float] = pydantic_v1.Field() + balance: typing.Optional[float] = pydantic.Field() """ The invoice's remaining balance. """ - remote_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_updated_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's invoice entry was updated. """ tracking_categories: typing.Optional[typing.List[typing.Optional[InvoiceTrackingCategoriesItem]]] - payments: typing.Optional[typing.List[typing.Optional[InvoicePaymentsItem]]] = pydantic_v1.Field() + payments: typing.Optional[typing.List[typing.Optional[InvoicePaymentsItem]]] = pydantic.Field() """ Array of `Payment` object IDs. """ - applied_payments: typing.Optional[typing.List[typing.Optional[InvoiceAppliedPaymentsItem]]] = pydantic_v1.Field() + applied_payments: typing.Optional[typing.List[typing.Optional[InvoiceAppliedPaymentsItem]]] = pydantic.Field() """ A list of the Payment Applied to Lines common models related to a given Invoice, Credit Note, or Journal Entry. 
""" line_items: typing.Optional[typing.List[InvoiceLineItem]] remote_was_deleted: typing.Optional[bool] - accounting_period: typing.Optional[InvoiceAccountingPeriod] = pydantic_v1.Field() + accounting_period: typing.Optional[InvoiceAccountingPeriod] = pydantic.Field() """ The accounting period that the Invoice was generated in. """ @@ -472,20 +473,11 @@ class Invoice(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/invoice_line_item.py b/src/merge/resources/accounting/types/invoice_line_item.py index ecef9df8..1734cc90 100644 --- a/src/merge/resources/accounting/types/invoice_line_item.py +++ b/src/merge/resources/accounting/types/invoice_line_item.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .invoice_line_item_account import InvoiceLineItemAccount from .invoice_line_item_currency import InvoiceLineItemCurrency from .invoice_line_item_item import InvoiceLineItemItem @@ -12,7 +13,7 @@ from .invoice_line_item_tracking_category import InvoiceLineItemTrackingCategory -class InvoiceLineItem(pydantic_v1.BaseModel): +class InvoiceLineItem(UniversalBaseModel): """ # The InvoiceLineItem Object @@ -26,42 +27,42 @@ class InvoiceLineItem(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The line item's description. """ - unit_price: typing.Optional[float] = pydantic_v1.Field() + unit_price: typing.Optional[float] = pydantic.Field() """ The line item's unit price. 
""" - quantity: typing.Optional[float] = pydantic_v1.Field() + quantity: typing.Optional[float] = pydantic.Field() """ The line item's quantity. """ - total_amount: typing.Optional[float] = pydantic_v1.Field() + total_amount: typing.Optional[float] = pydantic.Field() """ The line item's total amount. """ - currency: typing.Optional[InvoiceLineItemCurrency] = pydantic_v1.Field() + currency: typing.Optional[InvoiceLineItemCurrency] = pydantic.Field() """ The line item's currency. @@ -373,7 +374,7 @@ class InvoiceLineItem(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The line item's exchange rate. """ @@ -382,32 +383,23 @@ class InvoiceLineItem(pydantic_v1.BaseModel): account: typing.Optional[InvoiceLineItemAccount] tracking_category: typing.Optional[InvoiceLineItemTrackingCategory] tracking_categories: typing.Optional[typing.List[typing.Optional[InvoiceLineItemTrackingCategoriesItem]]] - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The company the line item belongs to. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ field_mappings: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/invoice_line_item_request.py b/src/merge/resources/accounting/types/invoice_line_item_request.py index a539cde5..ec0f7453 100644 --- a/src/merge/resources/accounting/types/invoice_line_item_request.py +++ b/src/merge/resources/accounting/types/invoice_line_item_request.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .invoice_line_item_request_account import InvoiceLineItemRequestAccount from .invoice_line_item_request_currency import InvoiceLineItemRequestCurrency from .invoice_line_item_request_item import InvoiceLineItemRequestItem @@ -12,7 +12,7 @@ from .invoice_line_item_request_tracking_category import InvoiceLineItemRequestTrackingCategory -class InvoiceLineItemRequest(pydantic_v1.BaseModel): +class InvoiceLineItemRequest(UniversalBaseModel): """ # The InvoiceLineItem Object @@ -25,32 +25,32 @@ class InvoiceLineItemRequest(pydantic_v1.BaseModel): Fetch from the `GET Invoice` endpoint and view the invoice's line items. """ - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The line item's description. """ - unit_price: typing.Optional[float] = pydantic_v1.Field() + unit_price: typing.Optional[float] = pydantic.Field() """ The line item's unit price. """ - quantity: typing.Optional[float] = pydantic_v1.Field() + quantity: typing.Optional[float] = pydantic.Field() """ The line item's quantity. """ - total_amount: typing.Optional[float] = pydantic_v1.Field() + total_amount: typing.Optional[float] = pydantic.Field() """ The line item's total amount. """ - currency: typing.Optional[InvoiceLineItemRequestCurrency] = pydantic_v1.Field() + currency: typing.Optional[InvoiceLineItemRequestCurrency] = pydantic.Field() """ The line item's currency. @@ -362,7 +362,7 @@ class InvoiceLineItemRequest(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The line item's exchange rate. """ @@ -371,7 +371,7 @@ class InvoiceLineItemRequest(pydantic_v1.BaseModel): account: typing.Optional[InvoiceLineItemRequestAccount] tracking_category: typing.Optional[InvoiceLineItemRequestTrackingCategory] tracking_categories: typing.Optional[typing.List[typing.Optional[InvoiceLineItemRequestTrackingCategoriesItem]]] - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The company the line item belongs to. 
""" @@ -379,20 +379,11 @@ class InvoiceLineItemRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/invoice_request.py b/src/merge/resources/accounting/types/invoice_request.py index 032c0edf..debceb54 100644 --- a/src/merge/resources/accounting/types/invoice_request.py +++ b/src/merge/resources/accounting/types/invoice_request.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .invoice_line_item_request import InvoiceLineItemRequest from .invoice_request_company import InvoiceRequestCompany from .invoice_request_contact import InvoiceRequestContact @@ -16,7 +17,7 @@ from .invoice_request_type import InvoiceRequestType -class InvoiceRequest(pydantic_v1.BaseModel): +class InvoiceRequest(UniversalBaseModel): """ # The Invoice Object @@ -29,7 +30,7 @@ class InvoiceRequest(pydantic_v1.BaseModel): Fetch from the `LIST Invoices` endpoint and view a company's invoices. """ - type: typing.Optional[InvoiceRequestType] = pydantic_v1.Field() + type: typing.Optional[InvoiceRequestType] = pydantic.Field() """ Whether the invoice is an accounts receivable or accounts payable. If `type` is `ACCOUNTS_PAYABLE`, the invoice is a bill. If `type` is `ACCOUNTS_RECEIVABLE`, it is an invoice. @@ -37,37 +38,37 @@ class InvoiceRequest(pydantic_v1.BaseModel): - `ACCOUNTS_PAYABLE` - ACCOUNTS_PAYABLE """ - contact: typing.Optional[InvoiceRequestContact] = pydantic_v1.Field() + contact: typing.Optional[InvoiceRequestContact] = pydantic.Field() """ The invoice's contact. """ - number: typing.Optional[str] = pydantic_v1.Field() + number: typing.Optional[str] = pydantic.Field() """ The invoice's number. """ - issue_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + issue_date: typing.Optional[dt.datetime] = pydantic.Field() """ The invoice's issue date. """ - due_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + due_date: typing.Optional[dt.datetime] = pydantic.Field() """ The invoice's due date. """ - paid_on_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + paid_on_date: typing.Optional[dt.datetime] = pydantic.Field() """ The invoice's paid date. 
""" - memo: typing.Optional[str] = pydantic_v1.Field() + memo: typing.Optional[str] = pydantic.Field() """ The invoice's private note. """ - status: typing.Optional[InvoiceRequestStatus] = pydantic_v1.Field() + status: typing.Optional[InvoiceRequestStatus] = pydantic.Field() """ The status of the invoice. @@ -79,12 +80,12 @@ class InvoiceRequest(pydantic_v1.BaseModel): - `VOID` - VOID """ - company: typing.Optional[InvoiceRequestCompany] = pydantic_v1.Field() + company: typing.Optional[InvoiceRequestCompany] = pydantic.Field() """ The company the invoice belongs to. """ - currency: typing.Optional[InvoiceRequestCurrency] = pydantic_v1.Field() + currency: typing.Optional[InvoiceRequestCurrency] = pydantic.Field() """ The invoice's currency. @@ -396,37 +397,37 @@ class InvoiceRequest(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The invoice's exchange rate. """ - total_discount: typing.Optional[float] = pydantic_v1.Field() + total_discount: typing.Optional[float] = pydantic.Field() """ The total discounts applied to the total cost. """ - sub_total: typing.Optional[float] = pydantic_v1.Field() + sub_total: typing.Optional[float] = pydantic.Field() """ The total amount being paid before taxes. """ - total_tax_amount: typing.Optional[float] = pydantic_v1.Field() + total_tax_amount: typing.Optional[float] = pydantic.Field() """ The total amount being paid in taxes. """ - total_amount: typing.Optional[float] = pydantic_v1.Field() + total_amount: typing.Optional[float] = pydantic.Field() """ The invoice's total amount. """ - balance: typing.Optional[float] = pydantic_v1.Field() + balance: typing.Optional[float] = pydantic.Field() """ The invoice's remaining balance. """ - payments: typing.Optional[typing.List[typing.Optional[InvoiceRequestPaymentsItem]]] = pydantic_v1.Field() + payments: typing.Optional[typing.List[typing.Optional[InvoiceRequestPaymentsItem]]] = pydantic.Field() """ Array of `Payment` object IDs. 
""" @@ -437,20 +438,11 @@ class InvoiceRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/invoice_response.py b/src/merge/resources/accounting/types/invoice_response.py index 0aa5deaf..ed8047ce 100644 --- a/src/merge/resources/accounting/types/invoice_response.py +++ b/src/merge/resources/accounting/types/invoice_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .invoice import Invoice from .warning_validation_problem import WarningValidationProblem -class InvoiceResponse(pydantic_v1.BaseModel): +class InvoiceResponse(UniversalBaseModel): model: Invoice warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/issue.py b/src/merge/resources/accounting/types/issue.py index 086a0db2..28366731 100644 --- a/src/merge/resources/accounting/types/issue.py +++ b/src/merge/resources/accounting/types/issue.py @@ -3,14 +3,15 @@ import datetime as dt 
import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .issue_status import IssueStatus -class Issue(pydantic_v1.BaseModel): +class Issue(UniversalBaseModel): id: typing.Optional[str] - status: typing.Optional[IssueStatus] = pydantic_v1.Field() + status: typing.Optional[IssueStatus] = pydantic.Field() """ Status of the issue. Options: ('ONGOING', 'RESOLVED') @@ -25,20 +26,11 @@ class Issue(pydantic_v1.BaseModel): is_muted: typing.Optional[bool] error_details: typing.Optional[typing.List[str]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/item.py b/src/merge/resources/accounting/types/item.py index 53354ebe..747c1923 100644 --- a/src/merge/resources/accounting/types/item.py +++ b/src/merge/resources/accounting/types/item.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .item_company import ItemCompany from .item_purchase_account import ItemPurchaseAccount from .item_sales_account import ItemSalesAccount @@ -12,7 +13,7 @@ from .remote_data import RemoteData -class Item(pydantic_v1.BaseModel): +class Item(UniversalBaseModel): """ # The Item Object @@ -26,27 +27,27 @@ class Item(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The item's name. """ - status: typing.Optional[ItemStatus] = pydantic_v1.Field() + status: typing.Optional[ItemStatus] = pydantic.Field() """ The item's status. 
@@ -54,37 +55,37 @@ class Item(pydantic_v1.BaseModel): - `ARCHIVED` - ARCHIVED """ - unit_price: typing.Optional[float] = pydantic_v1.Field() + unit_price: typing.Optional[float] = pydantic.Field() """ The item's unit price. """ - purchase_price: typing.Optional[float] = pydantic_v1.Field() + purchase_price: typing.Optional[float] = pydantic.Field() """ The price at which the item is purchased from a vendor. """ - purchase_account: typing.Optional[ItemPurchaseAccount] = pydantic_v1.Field() + purchase_account: typing.Optional[ItemPurchaseAccount] = pydantic.Field() """ References the default account used to record a purchase of the item. """ - sales_account: typing.Optional[ItemSalesAccount] = pydantic_v1.Field() + sales_account: typing.Optional[ItemSalesAccount] = pydantic.Field() """ References the default account used to record a sale. """ - company: typing.Optional[ItemCompany] = pydantic_v1.Field() + company: typing.Optional[ItemCompany] = pydantic.Field() """ The company the item belongs to. """ - remote_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_updated_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's item note was updated. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -92,20 +93,11 @@ class Item(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/journal_entry.py b/src/merge/resources/accounting/types/journal_entry.py index 942157b7..3bb688a5 100644 --- a/src/merge/resources/accounting/types/journal_entry.py +++ b/src/merge/resources/accounting/types/journal_entry.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .journal_entry_accounting_period import JournalEntryAccountingPeriod from .journal_entry_applied_payments_item import JournalEntryAppliedPaymentsItem from .journal_entry_company import JournalEntryCompany @@ -16,7 +17,7 @@ from .remote_data import RemoteData -class JournalEntry(pydantic_v1.BaseModel): +class JournalEntry(UniversalBaseModel): """ # The JournalEntry Object @@ -34,54 +35,52 @@ class 
JournalEntry(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - transaction_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + transaction_date: typing.Optional[dt.datetime] = pydantic.Field() """ The journal entry's transaction date. """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's journal entry was created. """ - remote_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_updated_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's journal entry was updated. """ - payments: typing.Optional[typing.List[typing.Optional[JournalEntryPaymentsItem]]] = pydantic_v1.Field() + payments: typing.Optional[typing.List[typing.Optional[JournalEntryPaymentsItem]]] = pydantic.Field() """ Array of `Payment` object IDs. """ - applied_payments: typing.Optional[ - typing.List[typing.Optional[JournalEntryAppliedPaymentsItem]] - ] = pydantic_v1.Field() + applied_payments: typing.Optional[typing.List[typing.Optional[JournalEntryAppliedPaymentsItem]]] = pydantic.Field() """ A list of the Payment Applied to Lines common models related to a given Invoice, Credit Note, or Journal Entry. """ - memo: typing.Optional[str] = pydantic_v1.Field() + memo: typing.Optional[str] = pydantic.Field() """ The journal entry's private note. """ - currency: typing.Optional[JournalEntryCurrency] = pydantic_v1.Field() + currency: typing.Optional[JournalEntryCurrency] = pydantic.Field() """ The journal's currency. @@ -393,25 +392,25 @@ class JournalEntry(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The journal entry's exchange rate. """ - company: typing.Optional[JournalEntryCompany] = pydantic_v1.Field() + company: typing.Optional[JournalEntryCompany] = pydantic.Field() """ The company the journal entry belongs to. """ lines: typing.Optional[typing.List[JournalLine]] - journal_number: typing.Optional[str] = pydantic_v1.Field() + journal_number: typing.Optional[str] = pydantic.Field() """ Reference number for identifying journal entries. """ tracking_categories: typing.Optional[typing.List[typing.Optional[JournalEntryTrackingCategoriesItem]]] remote_was_deleted: typing.Optional[bool] - posting_status: typing.Optional[JournalEntryPostingStatus] = pydantic_v1.Field() + posting_status: typing.Optional[JournalEntryPostingStatus] = pydantic.Field() """ The journal's posting status. @@ -419,7 +418,7 @@ class JournalEntry(pydantic_v1.BaseModel): - `POSTED` - POSTED """ - accounting_period: typing.Optional[JournalEntryAccountingPeriod] = pydantic_v1.Field() + accounting_period: typing.Optional[JournalEntryAccountingPeriod] = pydantic.Field() """ The accounting period that the JournalEntry was generated in. 
""" @@ -427,20 +426,11 @@ class JournalEntry(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/journal_entry_request.py b/src/merge/resources/accounting/types/journal_entry_request.py index 8a651811..74f17374 100644 --- a/src/merge/resources/accounting/types/journal_entry_request.py +++ b/src/merge/resources/accounting/types/journal_entry_request.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .journal_entry_request_company import JournalEntryRequestCompany from .journal_entry_request_currency import JournalEntryRequestCurrency from .journal_entry_request_payments_item import JournalEntryRequestPaymentsItem @@ -13,7 +14,7 @@ from .journal_line_request import JournalLineRequest -class JournalEntryRequest(pydantic_v1.BaseModel): +class JournalEntryRequest(UniversalBaseModel): """ # The JournalEntry Object @@ -26,22 +27,22 @@ class JournalEntryRequest(pydantic_v1.BaseModel): Fetch from the `GET JournalEntry` endpoint and view a company's journey entry. """ - transaction_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + transaction_date: typing.Optional[dt.datetime] = pydantic.Field() """ The journal entry's transaction date. """ - payments: typing.Optional[typing.List[typing.Optional[JournalEntryRequestPaymentsItem]]] = pydantic_v1.Field() + payments: typing.Optional[typing.List[typing.Optional[JournalEntryRequestPaymentsItem]]] = pydantic.Field() """ Array of `Payment` object IDs. """ - memo: typing.Optional[str] = pydantic_v1.Field() + memo: typing.Optional[str] = pydantic.Field() """ The journal entry's private note. """ - currency: typing.Optional[JournalEntryRequestCurrency] = pydantic_v1.Field() + currency: typing.Optional[JournalEntryRequestCurrency] = pydantic.Field() """ The journal's currency. @@ -353,24 +354,24 @@ class JournalEntryRequest(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The journal entry's exchange rate. 
""" - company: typing.Optional[JournalEntryRequestCompany] = pydantic_v1.Field() + company: typing.Optional[JournalEntryRequestCompany] = pydantic.Field() """ The company the journal entry belongs to. """ tracking_categories: typing.Optional[typing.List[typing.Optional[JournalEntryRequestTrackingCategoriesItem]]] lines: typing.Optional[typing.List[JournalLineRequest]] - journal_number: typing.Optional[str] = pydantic_v1.Field() + journal_number: typing.Optional[str] = pydantic.Field() """ Reference number for identifying journal entries. """ - posting_status: typing.Optional[JournalEntryRequestPostingStatus] = pydantic_v1.Field() + posting_status: typing.Optional[JournalEntryRequestPostingStatus] = pydantic.Field() """ The journal's posting status. @@ -381,20 +382,11 @@ class JournalEntryRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/journal_entry_response.py b/src/merge/resources/accounting/types/journal_entry_response.py index 1463efca..efb9cc02 100644 --- a/src/merge/resources/accounting/types/journal_entry_response.py +++ b/src/merge/resources/accounting/types/journal_entry_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .journal_entry import JournalEntry from .warning_validation_problem import WarningValidationProblem -class JournalEntryResponse(pydantic_v1.BaseModel): +class JournalEntryResponse(UniversalBaseModel): model: JournalEntry warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/journal_line.py b/src/merge/resources/accounting/types/journal_line.py index fc79526f..b2787745 100644 --- a/src/merge/resources/accounting/types/journal_line.py +++ b/src/merge/resources/accounting/types/journal_line.py @@ -3,15 +3,16 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .journal_line_account import JournalLineAccount from .journal_line_currency import JournalLineCurrency from .journal_line_tracking_categories_item import JournalLineTrackingCategoriesItem from .journal_line_tracking_category import JournalLineTrackingCategory -class JournalLine(pydantic_v1.BaseModel): +class JournalLine(UniversalBaseModel): """ # The JournalLine Object @@ -25,30 +26,30 @@ class JournalLine(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ account: typing.Optional[JournalLineAccount] - net_amount: typing.Optional[float] = pydantic_v1.Field() + net_amount: typing.Optional[float] = pydantic.Field() """ The value of the line item including taxes and other fees. 
""" tracking_category: typing.Optional[JournalLineTrackingCategory] tracking_categories: typing.Optional[typing.List[typing.Optional[JournalLineTrackingCategoriesItem]]] - currency: typing.Optional[JournalLineCurrency] = pydantic_v1.Field() + currency: typing.Optional[JournalLineCurrency] = pydantic.Field() """ The journal line item's currency. @@ -360,41 +361,32 @@ class JournalLine(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The company the journal entry belongs to. """ contact: typing.Optional[str] - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The line's description. """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The journal line item's exchange rate. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/journal_line_request.py b/src/merge/resources/accounting/types/journal_line_request.py index cb21b79f..30e8d79a 100644 --- a/src/merge/resources/accounting/types/journal_line_request.py +++ b/src/merge/resources/accounting/types/journal_line_request.py @@ -1,17 +1,17 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .journal_line_request_account import JournalLineRequestAccount from .journal_line_request_currency import JournalLineRequestCurrency from .journal_line_request_tracking_categories_item import JournalLineRequestTrackingCategoriesItem from .journal_line_request_tracking_category import JournalLineRequestTrackingCategory -class JournalLineRequest(pydantic_v1.BaseModel): +class JournalLineRequest(UniversalBaseModel): """ # The JournalLine Object @@ -24,20 +24,20 @@ class JournalLineRequest(pydantic_v1.BaseModel): Fetch from the `GET JournalEntry` endpoint and view the journal entry's line items. 
""" - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ account: typing.Optional[JournalLineRequestAccount] - net_amount: typing.Optional[float] = pydantic_v1.Field() + net_amount: typing.Optional[float] = pydantic.Field() """ The value of the line item including taxes and other fees. """ tracking_category: typing.Optional[JournalLineRequestTrackingCategory] tracking_categories: typing.Optional[typing.List[typing.Optional[JournalLineRequestTrackingCategoriesItem]]] - currency: typing.Optional[JournalLineRequestCurrency] = pydantic_v1.Field() + currency: typing.Optional[JournalLineRequestCurrency] = pydantic.Field() """ The journal line item's currency. @@ -349,18 +349,18 @@ class JournalLineRequest(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The company the journal entry belongs to. """ contact: typing.Optional[str] - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The line's description. """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The journal line item's exchange rate. """ @@ -368,20 +368,11 @@ class JournalLineRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/link_token.py b/src/merge/resources/accounting/types/link_token.py index 1c82d1ac..87c88faf 100644 --- a/src/merge/resources/accounting/types/link_token.py +++ b/src/merge/resources/accounting/types/link_token.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class LinkToken(pydantic_v1.BaseModel): + +class LinkToken(UniversalBaseModel): link_token: str integration_name: typing.Optional[str] magic_link_url: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/linked_account_status.py b/src/merge/resources/accounting/types/linked_account_status.py index 60e21a98..34184012 100644 --- a/src/merge/resources/accounting/types/linked_account_status.py +++ b/src/merge/resources/accounting/types/linked_account_status.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
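LinkToken, migrated just above, is typical of the simpler response models: a few plain fields plus the shared frozen/extra-allow configuration. A minimal usage sketch under those assumptions follows; the import path is taken from this patch, while the exact exception type raised on mutation depends on the installed Pydantic major.

# Usage sketch only; frozen=True means instances reject mutation on both majors.
from merge.resources.accounting.types.link_token import LinkToken

token = LinkToken(
    link_token="abc123",
    integration_name="Example Integration",
    magic_link_url=None,
)
print(token.link_token)

try:
    token.link_token = "other"  # frozen=True: assignment is rejected
except Exception as exc:  # TypeError on Pydantic v1, ValidationError on Pydantic v2
    print(type(exc).__name__)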
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class LinkedAccountStatus(pydantic_v1.BaseModel): + +class LinkedAccountStatus(UniversalBaseModel): linked_account_status: str can_make_request: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/meta_response.py b/src/merge/resources/accounting/types/meta_response.py index debaf4ef..27e02126 100644 --- a/src/merge/resources/accounting/types/meta_response.py +++ b/src/merge/resources/accounting/types/meta_response.py @@ -1,34 +1,25 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .linked_account_status import LinkedAccountStatus -class MetaResponse(pydantic_v1.BaseModel): +class MetaResponse(UniversalBaseModel): request_schema: typing.Dict[str, typing.Any] remote_field_classes: typing.Optional[typing.Dict[str, typing.Any]] status: typing.Optional[LinkedAccountStatus] has_conditional_params: bool has_required_linked_account_params: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/model_operation.py b/src/merge/resources/accounting/types/model_operation.py index 0f4429ec..efe8355e 100644 --- 
a/src/merge/resources/accounting/types/model_operation.py +++ b/src/merge/resources/accounting/types/model_operation.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ModelOperation(pydantic_v1.BaseModel): + +class ModelOperation(UniversalBaseModel): """ # The ModelOperation Object @@ -25,20 +25,11 @@ class ModelOperation(pydantic_v1.BaseModel): required_post_parameters: typing.List[str] supported_fields: typing.List[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/model_permission_deserializer.py b/src/merge/resources/accounting/types/model_permission_deserializer.py index 5a6adf20..14bc4f99 100644 --- a/src/merge/resources/accounting/types/model_permission_deserializer.py +++ b/src/merge/resources/accounting/types/model_permission_deserializer.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
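Alongside the config change, every model drops its hand-rolled json()/dict() overrides, which forced by_alias + exclude_unset and deep-merged the result with an exclude_none pass via deep_union_pydantic_dicts. That responsibility now sits in UniversalBaseModel. The sketch below is only a rough, version-agnostic approximation of those old defaults, not the SDK's internal logic.

# Sketch: explicit serialization roughly equivalent to the deleted overrides.
# `model` stands for any instance of these generated types.
import typing

from merge.core.pydantic_utilities import IS_PYDANTIC_V2


def to_payload(model: typing.Any) -> typing.Dict[str, typing.Any]:
    # by_alias/exclude_unset were the defaults the old overrides applied
    # (they additionally merged in an exclude_none pass).
    if IS_PYDANTIC_V2:
        return model.model_dump(by_alias=True, exclude_unset=True)
    return model.dict(by_alias=True, exclude_unset=True)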
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ModelPermissionDeserializer(pydantic_v1.BaseModel): - is_enabled: typing.Optional[bool] - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class ModelPermissionDeserializer(UniversalBaseModel): + is_enabled: typing.Optional[bool] - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/model_permission_deserializer_request.py b/src/merge/resources/accounting/types/model_permission_deserializer_request.py index 3f72b9ac..cc2e7f77 100644 --- a/src/merge/resources/accounting/types/model_permission_deserializer_request.py +++ b/src/merge/resources/accounting/types/model_permission_deserializer_request.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
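The extra="allow" setting carried over in every migrated config means fields the API adds later are retained rather than rejected. A small sketch against the ModelPermissionDeserializer migrated above; the extra keyword shown is hypothetical and only illustrates the behavior.

# Sketch: unknown keys are kept on the instance instead of raising a validation error.
from merge.resources.accounting.types.model_permission_deserializer import ModelPermissionDeserializer

perm = ModelPermissionDeserializer(is_enabled=True, future_flag="beta")  # "future_flag" is made up
print(perm.is_enabled)  # True; the unrecognized "future_flag" value is preserved alongside it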
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ModelPermissionDeserializerRequest(pydantic_v1.BaseModel): - is_enabled: typing.Optional[bool] - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class ModelPermissionDeserializerRequest(UniversalBaseModel): + is_enabled: typing.Optional[bool] - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/multipart_form_field_request.py b/src/merge/resources/accounting/types/multipart_form_field_request.py index 9c8ffb21..b6a6c708 100644 --- a/src/merge/resources/accounting/types/multipart_form_field_request.py +++ b/src/merge/resources/accounting/types/multipart_form_field_request.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .multipart_form_field_request_encoding import MultipartFormFieldRequestEncoding -class MultipartFormFieldRequest(pydantic_v1.BaseModel): +class MultipartFormFieldRequest(UniversalBaseModel): """ # The MultipartFormField Object @@ -21,17 +21,17 @@ class MultipartFormFieldRequest(pydantic_v1.BaseModel): Create a `MultipartFormField` to define a multipart form entry. """ - name: str = pydantic_v1.Field() + name: str = pydantic.Field() """ The name of the form field """ - data: str = pydantic_v1.Field() + data: str = pydantic.Field() """ The data for the form field. """ - encoding: typing.Optional[MultipartFormFieldRequestEncoding] = pydantic_v1.Field() + encoding: typing.Optional[MultipartFormFieldRequestEncoding] = pydantic.Field() """ The encoding of the value of `data`. Defaults to `RAW` if not defined. @@ -40,30 +40,21 @@ class MultipartFormFieldRequest(pydantic_v1.BaseModel): - `GZIP_BASE64` - GZIP_BASE64 """ - file_name: typing.Optional[str] = pydantic_v1.Field() + file_name: typing.Optional[str] = pydantic.Field() """ The file name of the form field, if the field is for a file. """ - content_type: typing.Optional[str] = pydantic_v1.Field() + content_type: typing.Optional[str] = pydantic.Field() """ The MIME type of the file, if the field is for a file. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_account_details_and_actions_list.py b/src/merge/resources/accounting/types/paginated_account_details_and_actions_list.py index 280100c4..07323330 100644 --- a/src/merge/resources/accounting/types/paginated_account_details_and_actions_list.py +++ b/src/merge/resources/accounting/types/paginated_account_details_and_actions_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_details_and_actions import AccountDetailsAndActions -class PaginatedAccountDetailsAndActionsList(pydantic_v1.BaseModel): +class PaginatedAccountDetailsAndActionsList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[AccountDetailsAndActions]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_account_list.py b/src/merge/resources/accounting/types/paginated_account_list.py index 21d2cda1..d9be284b 100644 --- a/src/merge/resources/accounting/types/paginated_account_list.py +++ b/src/merge/resources/accounting/types/paginated_account_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account import Account -class PaginatedAccountList(pydantic_v1.BaseModel): +class PaginatedAccountList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Account]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_accounting_attachment_list.py b/src/merge/resources/accounting/types/paginated_accounting_attachment_list.py index f9baa36f..e8034584 100644 --- a/src/merge/resources/accounting/types/paginated_accounting_attachment_list.py +++ b/src/merge/resources/accounting/types/paginated_accounting_attachment_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
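PaginatedAccountList and the other Paginated*List models in this patch all expose the same cursor trio: next, previous, results. A pagination sketch follows; the root client class, its constructor arguments, the list method, and the cursor keyword are assumptions about the surrounding SDK surface, not taken from this diff.

# Pagination sketch (client surface assumed, hedged above).
from merge.client import Merge

client = Merge(api_key="...", account_token="...")

page = client.accounting.accounts.list()
while True:
    for account in page.results or []:  # results is Optional, hence the fallback
        print(account)
    if not page.next:
        break
    page = client.accounting.accounts.list(cursor=page.next)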
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .accounting_attachment import AccountingAttachment -class PaginatedAccountingAttachmentList(pydantic_v1.BaseModel): +class PaginatedAccountingAttachmentList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[AccountingAttachment]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_accounting_period_list.py b/src/merge/resources/accounting/types/paginated_accounting_period_list.py index d7dcad2d..46c80331 100644 --- a/src/merge/resources/accounting/types/paginated_accounting_period_list.py +++ b/src/merge/resources/accounting/types/paginated_accounting_period_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .accounting_period import AccountingPeriod -class PaginatedAccountingPeriodList(pydantic_v1.BaseModel): +class PaginatedAccountingPeriodList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[AccountingPeriod]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_audit_log_event_list.py b/src/merge/resources/accounting/types/paginated_audit_log_event_list.py index 1d4154d2..e5e04fa7 100644 --- a/src/merge/resources/accounting/types/paginated_audit_log_event_list.py +++ b/src/merge/resources/accounting/types/paginated_audit_log_event_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
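Because these models now sit on UniversalBaseModel, hydrating one from a raw API payload differs only by the Pydantic entry point. A minimal sketch using the PaginatedAccountingPeriodList migrated above; the payload dict is a made-up minimal example.

# Sketch: validating a raw payload into a generated model on either Pydantic major.
from merge.core.pydantic_utilities import IS_PYDANTIC_V2
from merge.resources.accounting.types.paginated_accounting_period_list import PaginatedAccountingPeriodList

raw = {"next": None, "previous": None, "results": []}

if IS_PYDANTIC_V2:
    page = PaginatedAccountingPeriodList.model_validate(raw)
else:
    page = PaginatedAccountingPeriodList.parse_obj(raw)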
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .audit_log_event import AuditLogEvent -class PaginatedAuditLogEventList(pydantic_v1.BaseModel): +class PaginatedAuditLogEventList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[AuditLogEvent]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_balance_sheet_list.py b/src/merge/resources/accounting/types/paginated_balance_sheet_list.py index 00716e0a..346783ee 100644 --- a/src/merge/resources/accounting/types/paginated_balance_sheet_list.py +++ b/src/merge/resources/accounting/types/paginated_balance_sheet_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .balance_sheet import BalanceSheet -class PaginatedBalanceSheetList(pydantic_v1.BaseModel): +class PaginatedBalanceSheetList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[BalanceSheet]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_cash_flow_statement_list.py b/src/merge/resources/accounting/types/paginated_cash_flow_statement_list.py index 2960e23e..0eaf206c 100644 --- a/src/merge/resources/accounting/types/paginated_cash_flow_statement_list.py +++ b/src/merge/resources/accounting/types/paginated_cash_flow_statement_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .cash_flow_statement import CashFlowStatement -class PaginatedCashFlowStatementList(pydantic_v1.BaseModel): +class PaginatedCashFlowStatementList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[CashFlowStatement]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_company_info_list.py b/src/merge/resources/accounting/types/paginated_company_info_list.py index 3f31d2c2..a8829de1 100644 --- a/src/merge/resources/accounting/types/paginated_company_info_list.py +++ b/src/merge/resources/accounting/types/paginated_company_info_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .company_info import CompanyInfo -class PaginatedCompanyInfoList(pydantic_v1.BaseModel): +class PaginatedCompanyInfoList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[CompanyInfo]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_contact_list.py b/src/merge/resources/accounting/types/paginated_contact_list.py index dbe7c925..5e9c3fb7 100644 --- a/src/merge/resources/accounting/types/paginated_contact_list.py +++ b/src/merge/resources/accounting/types/paginated_contact_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .contact import Contact -class PaginatedContactList(pydantic_v1.BaseModel): +class PaginatedContactList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Contact]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_credit_note_list.py b/src/merge/resources/accounting/types/paginated_credit_note_list.py index 5ea1ac05..8919e2e5 100644 --- a/src/merge/resources/accounting/types/paginated_credit_note_list.py +++ b/src/merge/resources/accounting/types/paginated_credit_note_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .credit_note import CreditNote -class PaginatedCreditNoteList(pydantic_v1.BaseModel): +class PaginatedCreditNoteList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[CreditNote]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_expense_list.py b/src/merge/resources/accounting/types/paginated_expense_list.py index aae906c4..e1ec252c 100644 --- a/src/merge/resources/accounting/types/paginated_expense_list.py +++ b/src/merge/resources/accounting/types/paginated_expense_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .expense import Expense -class PaginatedExpenseList(pydantic_v1.BaseModel): +class PaginatedExpenseList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Expense]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_income_statement_list.py b/src/merge/resources/accounting/types/paginated_income_statement_list.py index 15c59bb6..abd89065 100644 --- a/src/merge/resources/accounting/types/paginated_income_statement_list.py +++ b/src/merge/resources/accounting/types/paginated_income_statement_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .income_statement import IncomeStatement -class PaginatedIncomeStatementList(pydantic_v1.BaseModel): +class PaginatedIncomeStatementList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[IncomeStatement]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_invoice_list.py b/src/merge/resources/accounting/types/paginated_invoice_list.py index 52457121..4a3d41ce 100644 --- a/src/merge/resources/accounting/types/paginated_invoice_list.py +++ b/src/merge/resources/accounting/types/paginated_invoice_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .invoice import Invoice -class PaginatedInvoiceList(pydantic_v1.BaseModel): +class PaginatedInvoiceList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Invoice]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_issue_list.py b/src/merge/resources/accounting/types/paginated_issue_list.py index 1016e29a..da8437f1 100644 --- a/src/merge/resources/accounting/types/paginated_issue_list.py +++ b/src/merge/resources/accounting/types/paginated_issue_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .issue import Issue -class PaginatedIssueList(pydantic_v1.BaseModel): +class PaginatedIssueList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Issue]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_item_list.py b/src/merge/resources/accounting/types/paginated_item_list.py index 5964764c..a81f6520 100644 --- a/src/merge/resources/accounting/types/paginated_item_list.py +++ b/src/merge/resources/accounting/types/paginated_item_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .item import Item -class PaginatedItemList(pydantic_v1.BaseModel): +class PaginatedItemList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Item]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_journal_entry_list.py b/src/merge/resources/accounting/types/paginated_journal_entry_list.py index 2e120953..ad8ab142 100644 --- a/src/merge/resources/accounting/types/paginated_journal_entry_list.py +++ b/src/merge/resources/accounting/types/paginated_journal_entry_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .journal_entry import JournalEntry -class PaginatedJournalEntryList(pydantic_v1.BaseModel): +class PaginatedJournalEntryList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[JournalEntry]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_payment_list.py b/src/merge/resources/accounting/types/paginated_payment_list.py index 76fa6efc..90c2d9b4 100644 --- a/src/merge/resources/accounting/types/paginated_payment_list.py +++ b/src/merge/resources/accounting/types/paginated_payment_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .payment import Payment -class PaginatedPaymentList(pydantic_v1.BaseModel): +class PaginatedPaymentList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Payment]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_purchase_order_list.py b/src/merge/resources/accounting/types/paginated_purchase_order_list.py index fef2d120..2d89e9a5 100644 --- a/src/merge/resources/accounting/types/paginated_purchase_order_list.py +++ b/src/merge/resources/accounting/types/paginated_purchase_order_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .purchase_order import PurchaseOrder -class PaginatedPurchaseOrderList(pydantic_v1.BaseModel): +class PaginatedPurchaseOrderList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[PurchaseOrder]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_sync_status_list.py b/src/merge/resources/accounting/types/paginated_sync_status_list.py index 6c88197e..7faca80c 100644 --- a/src/merge/resources/accounting/types/paginated_sync_status_list.py +++ b/src/merge/resources/accounting/types/paginated_sync_status_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .sync_status import SyncStatus -class PaginatedSyncStatusList(pydantic_v1.BaseModel): +class PaginatedSyncStatusList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[SyncStatus]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_tax_rate_list.py b/src/merge/resources/accounting/types/paginated_tax_rate_list.py index 08dc426b..ecb8274d 100644 --- a/src/merge/resources/accounting/types/paginated_tax_rate_list.py +++ b/src/merge/resources/accounting/types/paginated_tax_rate_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .tax_rate import TaxRate -class PaginatedTaxRateList(pydantic_v1.BaseModel): +class PaginatedTaxRateList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[TaxRate]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_tracking_category_list.py b/src/merge/resources/accounting/types/paginated_tracking_category_list.py index 67d4fbae..5f175a14 100644 --- a/src/merge/resources/accounting/types/paginated_tracking_category_list.py +++ b/src/merge/resources/accounting/types/paginated_tracking_category_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .tracking_category import TrackingCategory -class PaginatedTrackingCategoryList(pydantic_v1.BaseModel): +class PaginatedTrackingCategoryList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[TrackingCategory]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_transaction_list.py b/src/merge/resources/accounting/types/paginated_transaction_list.py index 4f73938b..707bb479 100644 --- a/src/merge/resources/accounting/types/paginated_transaction_list.py +++ b/src/merge/resources/accounting/types/paginated_transaction_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .transaction import Transaction -class PaginatedTransactionList(pydantic_v1.BaseModel): +class PaginatedTransactionList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Transaction]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/paginated_vendor_credit_list.py b/src/merge/resources/accounting/types/paginated_vendor_credit_list.py index 69fbf3ce..7825233e 100644 --- a/src/merge/resources/accounting/types/paginated_vendor_credit_list.py +++ b/src/merge/resources/accounting/types/paginated_vendor_credit_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .vendor_credit import VendorCredit -class PaginatedVendorCreditList(pydantic_v1.BaseModel): +class PaginatedVendorCreditList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[VendorCredit]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/patched_payment_request.py b/src/merge/resources/accounting/types/patched_payment_request.py index c44daa71..20ef4714 100644 --- a/src/merge/resources/accounting/types/patched_payment_request.py +++ b/src/merge/resources/accounting/types/patched_payment_request.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .patched_payment_request_account import PatchedPaymentRequestAccount from .patched_payment_request_accounting_period import PatchedPaymentRequestAccountingPeriod from .patched_payment_request_applied_to_lines_item import PatchedPaymentRequestAppliedToLinesItem @@ -15,7 +16,7 @@ from .patched_payment_request_type import PatchedPaymentRequestType -class PatchedPaymentRequest(pydantic_v1.BaseModel): +class PatchedPaymentRequest(UniversalBaseModel): """ # The Payment Object @@ -28,22 +29,22 @@ class PatchedPaymentRequest(pydantic_v1.BaseModel): Fetch from the `GET Payment` endpoint and view an invoice's payment. """ - transaction_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + transaction_date: typing.Optional[dt.datetime] = pydantic.Field() """ The payment's transaction date. """ - contact: typing.Optional[PatchedPaymentRequestContact] = pydantic_v1.Field() + contact: typing.Optional[PatchedPaymentRequestContact] = pydantic.Field() """ The supplier, or customer involved in the payment. """ - account: typing.Optional[PatchedPaymentRequestAccount] = pydantic_v1.Field() + account: typing.Optional[PatchedPaymentRequestAccount] = pydantic.Field() """ The supplier’s or customer’s account in which the payment is made. """ - currency: typing.Optional[PatchedPaymentRequestCurrency] = pydantic_v1.Field() + currency: typing.Optional[PatchedPaymentRequestCurrency] = pydantic.Field() """ The payment's currency. 
@@ -355,22 +356,22 @@ class PatchedPaymentRequest(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The payment's exchange rate. """ - company: typing.Optional[PatchedPaymentRequestCompany] = pydantic_v1.Field() + company: typing.Optional[PatchedPaymentRequestCompany] = pydantic.Field() """ The company the payment belongs to. """ - total_amount: typing.Optional[float] = pydantic_v1.Field() + total_amount: typing.Optional[float] = pydantic.Field() """ The total amount of money being paid to the supplier, or customer, after taxes. """ - type: typing.Optional[PatchedPaymentRequestType] = pydantic_v1.Field() + type: typing.Optional[PatchedPaymentRequestType] = pydantic.Field() """ The type of the invoice. @@ -379,12 +380,12 @@ class PatchedPaymentRequest(pydantic_v1.BaseModel): """ tracking_categories: typing.Optional[typing.List[typing.Optional[PatchedPaymentRequestTrackingCategoriesItem]]] - accounting_period: typing.Optional[PatchedPaymentRequestAccountingPeriod] = pydantic_v1.Field() + accounting_period: typing.Optional[PatchedPaymentRequestAccountingPeriod] = pydantic.Field() """ The accounting period that the Payment was generated in. """ - applied_to_lines: typing.Optional[typing.List[PatchedPaymentRequestAppliedToLinesItem]] = pydantic_v1.Field() + applied_to_lines: typing.Optional[typing.List[PatchedPaymentRequestAppliedToLinesItem]] = pydantic.Field() """ A list of “Payment Applied to Lines” objects. """ @@ -392,20 +393,11 @@ class PatchedPaymentRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/payment.py b/src/merge/resources/accounting/types/payment.py index b191449d..2e5463a8 100644 --- a/src/merge/resources/accounting/types/payment.py +++ b/src/merge/resources/accounting/types/payment.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .payment_account import PaymentAccount from .payment_accounting_period import PaymentAccountingPeriod from .payment_applied_to_lines_item import PaymentAppliedToLinesItem @@ -16,7 +17,7 @@ from .remote_data import RemoteData -class 
Payment(pydantic_v1.BaseModel): +class Payment(UniversalBaseModel): """ # The Payment Object @@ -30,37 +31,37 @@ class Payment(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - transaction_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + transaction_date: typing.Optional[dt.datetime] = pydantic.Field() """ The payment's transaction date. """ - contact: typing.Optional[PaymentContact] = pydantic_v1.Field() + contact: typing.Optional[PaymentContact] = pydantic.Field() """ The supplier, or customer involved in the payment. """ - account: typing.Optional[PaymentAccount] = pydantic_v1.Field() + account: typing.Optional[PaymentAccount] = pydantic.Field() """ The supplier’s or customer’s account in which the payment is made. """ - currency: typing.Optional[PaymentCurrency] = pydantic_v1.Field() + currency: typing.Optional[PaymentCurrency] = pydantic.Field() """ The payment's currency. @@ -372,22 +373,22 @@ class Payment(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The payment's exchange rate. """ - company: typing.Optional[PaymentCompany] = pydantic_v1.Field() + company: typing.Optional[PaymentCompany] = pydantic.Field() """ The company the payment belongs to. """ - total_amount: typing.Optional[float] = pydantic_v1.Field() + total_amount: typing.Optional[float] = pydantic.Field() """ The total amount of money being paid to the supplier, or customer, after taxes. """ - type: typing.Optional[PaymentType] = pydantic_v1.Field() + type: typing.Optional[PaymentType] = pydantic.Field() """ The type of the invoice. @@ -396,22 +397,22 @@ class Payment(pydantic_v1.BaseModel): """ tracking_categories: typing.Optional[typing.List[typing.Optional[PaymentTrackingCategoriesItem]]] - remote_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_updated_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's payment entry was updated. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ - accounting_period: typing.Optional[PaymentAccountingPeriod] = pydantic_v1.Field() + accounting_period: typing.Optional[PaymentAccountingPeriod] = pydantic.Field() """ The accounting period that the Payment was generated in. """ - applied_to_lines: typing.Optional[typing.List[PaymentAppliedToLinesItem]] = pydantic_v1.Field() + applied_to_lines: typing.Optional[typing.List[PaymentAppliedToLinesItem]] = pydantic.Field() """ A list of “Payment Applied to Lines” objects. 
""" @@ -419,20 +420,11 @@ class Payment(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/payment_line_item.py b/src/merge/resources/accounting/types/payment_line_item.py index 335d7235..8c72349f 100644 --- a/src/merge/resources/accounting/types/payment_line_item.py +++ b/src/merge/resources/accounting/types/payment_line_item.py @@ -3,11 +3,12 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class PaymentLineItem(pydantic_v1.BaseModel): + +class PaymentLineItem(UniversalBaseModel): """ # The PaymentLineItem Object @@ -21,55 +22,46 @@ class PaymentLineItem(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - applied_amount: typing.Optional[str] = pydantic_v1.Field() + applied_amount: typing.Optional[str] = pydantic.Field() """ The amount being applied to the transaction. """ - applied_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + applied_date: typing.Optional[dt.datetime] = pydantic.Field() """ The date the payment portion is applied. """ - related_object_id: typing.Optional[str] = pydantic_v1.Field() + related_object_id: typing.Optional[str] = pydantic.Field() """ The Merge ID of the transaction the payment portion is being applied to. """ - related_object_type: typing.Optional[str] = pydantic_v1.Field() + related_object_type: typing.Optional[str] = pydantic.Field() """ The type of transaction the payment portion is being applied to. Possible values include: INVOICE, JOURNAL_ENTRY, or CREDIT_NOTE. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/payment_line_item_request.py b/src/merge/resources/accounting/types/payment_line_item_request.py index 8a855ea5..b20c6d78 100644 --- a/src/merge/resources/accounting/types/payment_line_item_request.py +++ b/src/merge/resources/accounting/types/payment_line_item_request.py @@ -3,11 +3,12 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class PaymentLineItemRequest(pydantic_v1.BaseModel): + +class PaymentLineItemRequest(UniversalBaseModel): """ # The PaymentLineItem Object @@ -20,27 +21,27 @@ class PaymentLineItemRequest(pydantic_v1.BaseModel): `Payment` will have a field called `applied-to-lines` which will be an array of `PaymentLineItemInternalMappingSerializer` objects that can either be a `Invoice`, `CreditNote`, or `JournalEntry`. """ - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - applied_amount: typing.Optional[str] = pydantic_v1.Field() + applied_amount: typing.Optional[str] = pydantic.Field() """ The amount being applied to the transaction. """ - applied_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + applied_date: typing.Optional[dt.datetime] = pydantic.Field() """ The date the payment portion is applied. """ - related_object_id: typing.Optional[str] = pydantic_v1.Field() + related_object_id: typing.Optional[str] = pydantic.Field() """ The Merge ID of the transaction the payment portion is being applied to. """ - related_object_type: typing.Optional[str] = pydantic_v1.Field() + related_object_type: typing.Optional[str] = pydantic.Field() """ The type of transaction the payment portion is being applied to. Possible values include: INVOICE, JOURNAL_ENTRY, or CREDIT_NOTE. 
""" @@ -48,20 +49,11 @@ class PaymentLineItemRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/payment_request.py b/src/merge/resources/accounting/types/payment_request.py index ef524552..fd74df00 100644 --- a/src/merge/resources/accounting/types/payment_request.py +++ b/src/merge/resources/accounting/types/payment_request.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .payment_request_account import PaymentRequestAccount from .payment_request_accounting_period import PaymentRequestAccountingPeriod from .payment_request_applied_to_lines_item import PaymentRequestAppliedToLinesItem @@ -15,7 +16,7 @@ from .payment_request_type import PaymentRequestType -class PaymentRequest(pydantic_v1.BaseModel): +class PaymentRequest(UniversalBaseModel): """ # The Payment Object @@ -28,22 +29,22 @@ class PaymentRequest(pydantic_v1.BaseModel): Fetch from the `GET Payment` endpoint and view an invoice's payment. """ - transaction_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + transaction_date: typing.Optional[dt.datetime] = pydantic.Field() """ The payment's transaction date. """ - contact: typing.Optional[PaymentRequestContact] = pydantic_v1.Field() + contact: typing.Optional[PaymentRequestContact] = pydantic.Field() """ The supplier, or customer involved in the payment. """ - account: typing.Optional[PaymentRequestAccount] = pydantic_v1.Field() + account: typing.Optional[PaymentRequestAccount] = pydantic.Field() """ The supplier’s or customer’s account in which the payment is made. """ - currency: typing.Optional[PaymentRequestCurrency] = pydantic_v1.Field() + currency: typing.Optional[PaymentRequestCurrency] = pydantic.Field() """ The payment's currency. @@ -355,22 +356,22 @@ class PaymentRequest(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The payment's exchange rate. """ - company: typing.Optional[PaymentRequestCompany] = pydantic_v1.Field() + company: typing.Optional[PaymentRequestCompany] = pydantic.Field() """ The company the payment belongs to. 
""" - total_amount: typing.Optional[float] = pydantic_v1.Field() + total_amount: typing.Optional[float] = pydantic.Field() """ The total amount of money being paid to the supplier, or customer, after taxes. """ - type: typing.Optional[PaymentRequestType] = pydantic_v1.Field() + type: typing.Optional[PaymentRequestType] = pydantic.Field() """ The type of the invoice. @@ -379,12 +380,12 @@ class PaymentRequest(pydantic_v1.BaseModel): """ tracking_categories: typing.Optional[typing.List[typing.Optional[PaymentRequestTrackingCategoriesItem]]] - accounting_period: typing.Optional[PaymentRequestAccountingPeriod] = pydantic_v1.Field() + accounting_period: typing.Optional[PaymentRequestAccountingPeriod] = pydantic.Field() """ The accounting period that the Payment was generated in. """ - applied_to_lines: typing.Optional[typing.List[PaymentRequestAppliedToLinesItem]] = pydantic_v1.Field() + applied_to_lines: typing.Optional[typing.List[PaymentRequestAppliedToLinesItem]] = pydantic.Field() """ A list of “Payment Applied to Lines” objects. """ @@ -392,20 +393,11 @@ class PaymentRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/payment_response.py b/src/merge/resources/accounting/types/payment_response.py index 6c1baf81..96614c29 100644 --- a/src/merge/resources/accounting/types/payment_response.py +++ b/src/merge/resources/accounting/types/payment_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .payment import Payment from .warning_validation_problem import WarningValidationProblem -class PaymentResponse(pydantic_v1.BaseModel): +class PaymentResponse(UniversalBaseModel): model: Payment warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/purchase_order.py b/src/merge/resources/accounting/types/purchase_order.py index 382e045c..f21ff6f4 100644 --- a/src/merge/resources/accounting/types/purchase_order.py +++ b/src/merge/resources/accounting/types/purchase_order.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .purchase_order_accounting_period import PurchaseOrderAccountingPeriod from .purchase_order_company import PurchaseOrderCompany from .purchase_order_currency import PurchaseOrderCurrency @@ -16,7 +17,7 @@ from .remote_data import RemoteData -class PurchaseOrder(pydantic_v1.BaseModel): +class PurchaseOrder(UniversalBaseModel): """ # The PurchaseOrder Object @@ -32,22 +33,22 @@ class PurchaseOrder(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - status: typing.Optional[PurchaseOrderStatus] = pydantic_v1.Field() + status: typing.Optional[PurchaseOrderStatus] = pydantic.Field() """ The purchase order's status. 
@@ -58,52 +59,52 @@ class PurchaseOrder(pydantic_v1.BaseModel): - `DELETED` - DELETED """ - issue_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + issue_date: typing.Optional[dt.datetime] = pydantic.Field() """ The purchase order's issue date. """ - purchase_order_number: typing.Optional[str] = pydantic_v1.Field() + purchase_order_number: typing.Optional[str] = pydantic.Field() """ The human-readable number of the purchase order. """ - delivery_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + delivery_date: typing.Optional[dt.datetime] = pydantic.Field() """ The purchase order's delivery date. """ - delivery_address: typing.Optional[PurchaseOrderDeliveryAddress] = pydantic_v1.Field() + delivery_address: typing.Optional[PurchaseOrderDeliveryAddress] = pydantic.Field() """ The purchase order's delivery address. """ - customer: typing.Optional[str] = pydantic_v1.Field() + customer: typing.Optional[str] = pydantic.Field() """ The contact making the purchase order. """ - vendor: typing.Optional[PurchaseOrderVendor] = pydantic_v1.Field() + vendor: typing.Optional[PurchaseOrderVendor] = pydantic.Field() """ The party fulfilling the purchase order. """ - memo: typing.Optional[str] = pydantic_v1.Field() + memo: typing.Optional[str] = pydantic.Field() """ A memo attached to the purchase order. """ - company: typing.Optional[PurchaseOrderCompany] = pydantic_v1.Field() + company: typing.Optional[PurchaseOrderCompany] = pydantic.Field() """ The company the purchase order belongs to. """ - total_amount: typing.Optional[float] = pydantic_v1.Field() + total_amount: typing.Optional[float] = pydantic.Field() """ The purchase order's total amount. """ - currency: typing.Optional[PurchaseOrderCurrency] = pydantic_v1.Field() + currency: typing.Optional[PurchaseOrderCurrency] = pydantic.Field() """ The purchase order's currency. @@ -415,29 +416,29 @@ class PurchaseOrder(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The purchase order's exchange rate. """ line_items: typing.Optional[typing.List[PurchaseOrderLineItem]] tracking_categories: typing.Optional[typing.List[typing.Optional[PurchaseOrderTrackingCategoriesItem]]] - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's purchase order note was created. """ - remote_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_updated_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's purchase order note was updated. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ - accounting_period: typing.Optional[PurchaseOrderAccountingPeriod] = pydantic_v1.Field() + accounting_period: typing.Optional[PurchaseOrderAccountingPeriod] = pydantic.Field() """ The accounting period that the PurchaseOrder was generated in. 
""" @@ -445,20 +446,11 @@ class PurchaseOrder(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/purchase_order_line_item.py b/src/merge/resources/accounting/types/purchase_order_line_item.py index fdfdc4ca..006ca767 100644 --- a/src/merge/resources/accounting/types/purchase_order_line_item.py +++ b/src/merge/resources/accounting/types/purchase_order_line_item.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .purchase_order_line_item_currency import PurchaseOrderLineItemCurrency from .purchase_order_line_item_item import PurchaseOrderLineItemItem -class PurchaseOrderLineItem(pydantic_v1.BaseModel): +class PurchaseOrderLineItem(UniversalBaseModel): """ # The PurchaseOrderLineItem Object @@ -23,63 +24,63 @@ class PurchaseOrderLineItem(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ A description of the good being purchased. """ - unit_price: typing.Optional[float] = pydantic_v1.Field() + unit_price: typing.Optional[float] = pydantic.Field() """ The line item's unit price. """ - quantity: typing.Optional[float] = pydantic_v1.Field() + quantity: typing.Optional[float] = pydantic.Field() """ The line item's quantity. """ item: typing.Optional[PurchaseOrderLineItemItem] - account: typing.Optional[str] = pydantic_v1.Field() + account: typing.Optional[str] = pydantic.Field() """ The purchase order line item's account. 
""" - tracking_category: typing.Optional[str] = pydantic_v1.Field() + tracking_category: typing.Optional[str] = pydantic.Field() """ The purchase order line item's associated tracking category. """ - tracking_categories: typing.Optional[typing.List[typing.Optional[str]]] = pydantic_v1.Field() + tracking_categories: typing.Optional[typing.List[typing.Optional[str]]] = pydantic.Field() """ The purchase order line item's associated tracking categories. """ - tax_amount: typing.Optional[str] = pydantic_v1.Field() + tax_amount: typing.Optional[str] = pydantic.Field() """ The purchase order line item's tax amount. """ - total_line_amount: typing.Optional[str] = pydantic_v1.Field() + total_line_amount: typing.Optional[str] = pydantic.Field() """ The purchase order line item's total amount. """ - currency: typing.Optional[PurchaseOrderLineItemCurrency] = pydantic_v1.Field() + currency: typing.Optional[PurchaseOrderLineItemCurrency] = pydantic.Field() """ The purchase order line item's currency. @@ -391,35 +392,26 @@ class PurchaseOrderLineItem(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The purchase order line item's exchange rate. """ - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The company the purchase order line item belongs to. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/purchase_order_line_item_request.py b/src/merge/resources/accounting/types/purchase_order_line_item_request.py index 0724a705..eb75a5e6 100644 --- a/src/merge/resources/accounting/types/purchase_order_line_item_request.py +++ b/src/merge/resources/accounting/types/purchase_order_line_item_request.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .purchase_order_line_item_request_currency import PurchaseOrderLineItemRequestCurrency from .purchase_order_line_item_request_item import PurchaseOrderLineItemRequestItem -class PurchaseOrderLineItemRequest(pydantic_v1.BaseModel): +class PurchaseOrderLineItemRequest(UniversalBaseModel): """ # The PurchaseOrderLineItem Object @@ -22,53 +22,53 @@ class PurchaseOrderLineItemRequest(pydantic_v1.BaseModel): Fetch from the `GET PurchaseOrder` endpoint and view a company's purchase orders. """ - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ A description of the good being purchased. """ - unit_price: typing.Optional[float] = pydantic_v1.Field() + unit_price: typing.Optional[float] = pydantic.Field() """ The line item's unit price. """ - quantity: typing.Optional[float] = pydantic_v1.Field() + quantity: typing.Optional[float] = pydantic.Field() """ The line item's quantity. """ item: typing.Optional[PurchaseOrderLineItemRequestItem] - account: typing.Optional[str] = pydantic_v1.Field() + account: typing.Optional[str] = pydantic.Field() """ The purchase order line item's account. """ - tracking_category: typing.Optional[str] = pydantic_v1.Field() + tracking_category: typing.Optional[str] = pydantic.Field() """ The purchase order line item's associated tracking category. """ - tracking_categories: typing.Optional[typing.List[typing.Optional[str]]] = pydantic_v1.Field() + tracking_categories: typing.Optional[typing.List[typing.Optional[str]]] = pydantic.Field() """ The purchase order line item's associated tracking categories. """ - tax_amount: typing.Optional[str] = pydantic_v1.Field() + tax_amount: typing.Optional[str] = pydantic.Field() """ The purchase order line item's tax amount. """ - total_line_amount: typing.Optional[str] = pydantic_v1.Field() + total_line_amount: typing.Optional[str] = pydantic.Field() """ The purchase order line item's total amount. """ - currency: typing.Optional[PurchaseOrderLineItemRequestCurrency] = pydantic_v1.Field() + currency: typing.Optional[PurchaseOrderLineItemRequestCurrency] = pydantic.Field() """ The purchase order line item's currency. @@ -380,12 +380,12 @@ class PurchaseOrderLineItemRequest(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The purchase order line item's exchange rate. """ - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The company the purchase order line item belongs to. 
""" @@ -393,20 +393,11 @@ class PurchaseOrderLineItemRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/purchase_order_request.py b/src/merge/resources/accounting/types/purchase_order_request.py index fb80e6e1..16ad3293 100644 --- a/src/merge/resources/accounting/types/purchase_order_request.py +++ b/src/merge/resources/accounting/types/purchase_order_request.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .purchase_order_line_item_request import PurchaseOrderLineItemRequest from .purchase_order_request_company import PurchaseOrderRequestCompany from .purchase_order_request_currency import PurchaseOrderRequestCurrency @@ -14,7 +15,7 @@ from .purchase_order_request_vendor import PurchaseOrderRequestVendor -class PurchaseOrderRequest(pydantic_v1.BaseModel): +class PurchaseOrderRequest(UniversalBaseModel): """ # The PurchaseOrder Object @@ -27,7 +28,7 @@ class PurchaseOrderRequest(pydantic_v1.BaseModel): Fetch from the `LIST PurchaseOrders` endpoint and view a company's purchase orders. """ - status: typing.Optional[PurchaseOrderRequestStatus] = pydantic_v1.Field() + status: typing.Optional[PurchaseOrderRequestStatus] = pydantic.Field() """ The purchase order's status. @@ -38,47 +39,47 @@ class PurchaseOrderRequest(pydantic_v1.BaseModel): - `DELETED` - DELETED """ - issue_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + issue_date: typing.Optional[dt.datetime] = pydantic.Field() """ The purchase order's issue date. """ - delivery_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + delivery_date: typing.Optional[dt.datetime] = pydantic.Field() """ The purchase order's delivery date. """ - delivery_address: typing.Optional[PurchaseOrderRequestDeliveryAddress] = pydantic_v1.Field() + delivery_address: typing.Optional[PurchaseOrderRequestDeliveryAddress] = pydantic.Field() """ The purchase order's delivery address. """ - customer: typing.Optional[str] = pydantic_v1.Field() + customer: typing.Optional[str] = pydantic.Field() """ The contact making the purchase order. 
""" - vendor: typing.Optional[PurchaseOrderRequestVendor] = pydantic_v1.Field() + vendor: typing.Optional[PurchaseOrderRequestVendor] = pydantic.Field() """ The party fulfilling the purchase order. """ - memo: typing.Optional[str] = pydantic_v1.Field() + memo: typing.Optional[str] = pydantic.Field() """ A memo attached to the purchase order. """ - company: typing.Optional[PurchaseOrderRequestCompany] = pydantic_v1.Field() + company: typing.Optional[PurchaseOrderRequestCompany] = pydantic.Field() """ The company the purchase order belongs to. """ - total_amount: typing.Optional[float] = pydantic_v1.Field() + total_amount: typing.Optional[float] = pydantic.Field() """ The purchase order's total amount. """ - currency: typing.Optional[PurchaseOrderRequestCurrency] = pydantic_v1.Field() + currency: typing.Optional[PurchaseOrderRequestCurrency] = pydantic.Field() """ The purchase order's currency. @@ -390,7 +391,7 @@ class PurchaseOrderRequest(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The purchase order's exchange rate. """ @@ -400,20 +401,11 @@ class PurchaseOrderRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/purchase_order_response.py b/src/merge/resources/accounting/types/purchase_order_response.py index 675e925b..c1e9b7ea 100644 --- a/src/merge/resources/accounting/types/purchase_order_response.py +++ b/src/merge/resources/accounting/types/purchase_order_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .purchase_order import PurchaseOrder from .warning_validation_problem import WarningValidationProblem -class PurchaseOrderResponse(pydantic_v1.BaseModel): +class PurchaseOrderResponse(UniversalBaseModel): model: PurchaseOrder warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/remote_data.py b/src/merge/resources/accounting/types/remote_data.py index 098f551b..d50bfca2 100644 --- a/src/merge/resources/accounting/types/remote_data.py +++ b/src/merge/resources/accounting/types/remote_data.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class RemoteData(pydantic_v1.BaseModel): + +class RemoteData(UniversalBaseModel): path: str data: typing.Optional[typing.Any] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/remote_endpoint_info.py b/src/merge/resources/accounting/types/remote_endpoint_info.py index da6037bc..9f627cae 100644 --- a/src/merge/resources/accounting/types/remote_endpoint_info.py +++ b/src/merge/resources/accounting/types/remote_endpoint_info.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class RemoteEndpointInfo(pydantic_v1.BaseModel): + +class RemoteEndpointInfo(UniversalBaseModel): method: str url_path: str field_traversal_path: typing.List[typing.Any] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/remote_field_api.py b/src/merge/resources/accounting/types/remote_field_api.py index c2a16698..1d1efb6a 100644 --- a/src/merge/resources/accounting/types/remote_field_api.py +++ b/src/merge/resources/accounting/types/remote_field_api.py @@ -1,39 +1,28 @@ # This file was auto-generated by Fern from our API 
Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .advanced_metadata import AdvancedMetadata from .remote_endpoint_info import RemoteEndpointInfo from .remote_field_api_coverage import RemoteFieldApiCoverage -class RemoteFieldApi(pydantic_v1.BaseModel): - schema_: typing.Dict[str, typing.Any] = pydantic_v1.Field(alias="schema") +class RemoteFieldApi(UniversalBaseModel): + schema_: typing.Dict[str, typing.Any] = pydantic.Field(alias="schema") remote_key_name: str remote_endpoint_info: RemoteEndpointInfo example_values: typing.List[typing.Any] advanced_metadata: typing.Optional[AdvancedMetadata] coverage: typing.Optional[RemoteFieldApiCoverage] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/remote_field_api_response.py b/src/merge/resources/accounting/types/remote_field_api_response.py index de14b900..0ef13691 100644 --- a/src/merge/resources/accounting/types/remote_field_api_response.py +++ b/src/merge/resources/accounting/types/remote_field_api_response.py @@ -1,54 +1,41 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_field_api import RemoteFieldApi -class RemoteFieldApiResponse(pydantic_v1.BaseModel): - account: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Account") - accounting_attachment: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field( - alias="AccountingAttachment" - ) - balance_sheet: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="BalanceSheet") - cash_flow_statement: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="CashFlowStatement") - company_info: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="CompanyInfo") - contact: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Contact") - income_statement: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="IncomeStatement") - credit_note: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="CreditNote") - item: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Item") - purchase_order: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="PurchaseOrder") - tracking_category: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="TrackingCategory") - journal_entry: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="JournalEntry") - tax_rate: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="TaxRate") - invoice: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Invoice") - payment: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Payment") - expense: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Expense") - vendor_credit: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="VendorCredit") - transaction: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Transaction") - general_ledger_transaction: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field( +class RemoteFieldApiResponse(UniversalBaseModel): + account: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Account") + accounting_attachment: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="AccountingAttachment") + balance_sheet: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="BalanceSheet") + cash_flow_statement: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="CashFlowStatement") + company_info: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="CompanyInfo") + contact: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Contact") + income_statement: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="IncomeStatement") + credit_note: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="CreditNote") + item: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Item") + purchase_order: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="PurchaseOrder") + tracking_category: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="TrackingCategory") + journal_entry: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="JournalEntry") 
+ tax_rate: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="TaxRate") + invoice: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Invoice") + payment: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Payment") + expense: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Expense") + vendor_credit: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="VendorCredit") + transaction: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Transaction") + general_ledger_transaction: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field( alias="GeneralLedgerTransaction" ) - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/remote_key.py b/src/merge/resources/accounting/types/remote_key.py index e0bec368..0ce7d620 100644 --- a/src/merge/resources/accounting/types/remote_key.py +++ b/src/merge/resources/accounting/types/remote_key.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class RemoteKey(pydantic_v1.BaseModel): + +class RemoteKey(UniversalBaseModel): """ # The RemoteKey Object @@ -23,20 +23,11 @@ class RemoteKey(pydantic_v1.BaseModel): name: str key: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/remote_response.py b/src/merge/resources/accounting/types/remote_response.py index f39951ee..5551bafc 100644 --- a/src/merge/resources/accounting/types/remote_response.py +++ b/src/merge/resources/accounting/types/remote_response.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .response_type_enum import ResponseTypeEnum -class RemoteResponse(pydantic_v1.BaseModel): +class RemoteResponse(UniversalBaseModel): """ # The RemoteResponse Object @@ -29,20 +29,11 @@ class RemoteResponse(pydantic_v1.BaseModel): response_type: typing.Optional[ResponseTypeEnum] headers: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/report_item.py b/src/merge/resources/accounting/types/report_item.py index 350ff55d..9383ea32 100644 --- a/src/merge/resources/accounting/types/report_item.py +++ b/src/merge/resources/accounting/types/report_item.py @@ -3,11 +3,12 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ReportItem(pydantic_v1.BaseModel): + +class ReportItem(UniversalBaseModel): """ # The ReportItem Object @@ -20,51 +21,42 @@ class ReportItem(pydantic_v1.BaseModel): Fetch from the `GET BalanceSheet` endpoint and view the balance sheet's report items. """ - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The report item's name. """ - value: typing.Optional[float] = pydantic_v1.Field() + value: typing.Optional[float] = pydantic.Field() """ The report item's value. """ sub_items: typing.Optional[typing.List[typing.Dict[str, typing.Any]]] - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The company the report item belongs to. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/sync_status.py b/src/merge/resources/accounting/types/sync_status.py index c6b7cbc4..03668cbf 100644 --- a/src/merge/resources/accounting/types/sync_status.py +++ b/src/merge/resources/accounting/types/sync_status.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .selective_sync_configurations_usage_enum import SelectiveSyncConfigurationsUsageEnum from .sync_status_status_enum import SyncStatusStatusEnum -class SyncStatus(pydantic_v1.BaseModel): +class SyncStatus(UniversalBaseModel): """ # The SyncStatus Object @@ -30,20 +31,11 @@ class SyncStatus(pydantic_v1.BaseModel): is_initial_sync: bool selective_sync_configurations_usage: typing.Optional[SelectiveSyncConfigurationsUsageEnum] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/tax_rate.py b/src/merge/resources/accounting/types/tax_rate.py index 5a9425d3..608e4200 100644 --- a/src/merge/resources/accounting/types/tax_rate.py +++ b/src/merge/resources/accounting/types/tax_rate.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData from 
.tax_rate_company import TaxRateCompany -class TaxRate(pydantic_v1.BaseModel): +class TaxRate(UniversalBaseModel): """ # The TaxRate Object @@ -23,42 +24,42 @@ class TaxRate(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The tax rate's description. """ - total_tax_rate: typing.Optional[float] = pydantic_v1.Field() + total_tax_rate: typing.Optional[float] = pydantic.Field() """ The tax’s total tax rate - sum of the tax components (not compounded). """ - effective_tax_rate: typing.Optional[float] = pydantic_v1.Field() + effective_tax_rate: typing.Optional[float] = pydantic.Field() """ The tax rate’s effective tax rate - total amount of tax with compounding. """ - company: typing.Optional[TaxRateCompany] = pydantic_v1.Field() + company: typing.Optional[TaxRateCompany] = pydantic.Field() """ The subsidiary that the tax rate belongs to (in the case of multi-entity systems). """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -66,20 +67,11 @@ class TaxRate(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/tracking_category.py b/src/merge/resources/accounting/types/tracking_category.py index f80fb38c..83a168c6 100644 --- a/src/merge/resources/accounting/types/tracking_category.py +++ b/src/merge/resources/accounting/types/tracking_category.py @@ -3,15 +3,16 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData from 
.tracking_category_category_type import TrackingCategoryCategoryType from .tracking_category_company import TrackingCategoryCompany from .tracking_category_status import TrackingCategoryStatus -class TrackingCategory(pydantic_v1.BaseModel): +class TrackingCategory(UniversalBaseModel): """ # The TrackingCategory Object @@ -25,27 +26,27 @@ class TrackingCategory(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The tracking category's name. """ - status: typing.Optional[TrackingCategoryStatus] = pydantic_v1.Field() + status: typing.Optional[TrackingCategoryStatus] = pydantic.Field() """ The tracking category's status. @@ -53,7 +54,7 @@ class TrackingCategory(pydantic_v1.BaseModel): - `ARCHIVED` - ARCHIVED """ - category_type: typing.Optional[TrackingCategoryCategoryType] = pydantic_v1.Field() + category_type: typing.Optional[TrackingCategoryCategoryType] = pydantic.Field() """ The tracking category’s type. @@ -61,17 +62,17 @@ class TrackingCategory(pydantic_v1.BaseModel): - `DEPARTMENT` - DEPARTMENT """ - parent_category: typing.Optional[str] = pydantic_v1.Field() + parent_category: typing.Optional[str] = pydantic.Field() """ ID of the parent tracking category. """ - company: typing.Optional[TrackingCategoryCompany] = pydantic_v1.Field() + company: typing.Optional[TrackingCategoryCompany] = pydantic.Field() """ The company the tracking category belongs to. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -79,20 +80,11 @@ class TrackingCategory(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/transaction.py b/src/merge/resources/accounting/types/transaction.py index 1c5920ff..1a78ca53 100644 --- a/src/merge/resources/accounting/types/transaction.py +++ b/src/merge/resources/accounting/types/transaction.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData from .transaction_account import TransactionAccount from .transaction_accounting_period import TransactionAccountingPeriod @@ -14,7 +15,7 @@ from .transaction_tracking_categories_item import TransactionTrackingCategoriesItem -class Transaction(pydantic_v1.BaseModel): +class Transaction(UniversalBaseModel): """ # The Transaction Object @@ -36,52 +37,52 @@ class Transaction(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - transaction_type: typing.Optional[str] = pydantic_v1.Field() + transaction_type: typing.Optional[str] = pydantic.Field() """ The type of transaction, which can by any transaction object not already included in Merge’s common model. """ - number: typing.Optional[str] = pydantic_v1.Field() + number: typing.Optional[str] = pydantic.Field() """ The transaction's number used for identifying purposes. """ - transaction_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + transaction_date: typing.Optional[dt.datetime] = pydantic.Field() """ The date upon which the transaction occurred. """ - account: typing.Optional[TransactionAccount] = pydantic_v1.Field() + account: typing.Optional[TransactionAccount] = pydantic.Field() """ The transaction's account. 
""" - contact: typing.Optional[TransactionContact] = pydantic_v1.Field() + contact: typing.Optional[TransactionContact] = pydantic.Field() """ The contact to whom the transaction relates to. """ - total_amount: typing.Optional[str] = pydantic_v1.Field() + total_amount: typing.Optional[str] = pydantic.Field() """ The total amount being paid after taxes. """ - currency: typing.Optional[TransactionCurrency] = pydantic_v1.Field() + currency: typing.Optional[TransactionCurrency] = pydantic.Field() """ The transaction's currency. @@ -393,24 +394,24 @@ class Transaction(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The transaction's exchange rate. """ - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The company the transaction belongs to. """ tracking_categories: typing.Optional[typing.List[typing.Optional[TransactionTrackingCategoriesItem]]] line_items: typing.Optional[typing.List[TransactionLineItem]] - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ - accounting_period: typing.Optional[TransactionAccountingPeriod] = pydantic_v1.Field() + accounting_period: typing.Optional[TransactionAccountingPeriod] = pydantic.Field() """ The accounting period that the Transaction was generated in. """ @@ -418,20 +419,11 @@ class Transaction(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/transaction_line_item.py b/src/merge/resources/accounting/types/transaction_line_item.py index 31e37c32..1e92a680 100644 --- a/src/merge/resources/accounting/types/transaction_line_item.py +++ b/src/merge/resources/accounting/types/transaction_line_item.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .transaction_line_item_currency import TransactionLineItemCurrency from .transaction_line_item_item import TransactionLineItemItem -class TransactionLineItem(pydantic_v1.BaseModel): +class 
TransactionLineItem(UniversalBaseModel): """ # The TransactionLineItem Object @@ -23,59 +24,59 @@ class TransactionLineItem(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - memo: typing.Optional[str] = pydantic_v1.Field() + memo: typing.Optional[str] = pydantic.Field() """ An internal note used by the business to clarify purpose of the transaction. """ - unit_price: typing.Optional[str] = pydantic_v1.Field() + unit_price: typing.Optional[str] = pydantic.Field() """ The line item's unit price. """ - quantity: typing.Optional[str] = pydantic_v1.Field() + quantity: typing.Optional[str] = pydantic.Field() """ The line item's quantity. """ item: typing.Optional[TransactionLineItemItem] - account: typing.Optional[str] = pydantic_v1.Field() + account: typing.Optional[str] = pydantic.Field() """ The line item's account. """ - tracking_category: typing.Optional[str] = pydantic_v1.Field() + tracking_category: typing.Optional[str] = pydantic.Field() """ The line's associated tracking category. """ - tracking_categories: typing.Optional[typing.List[typing.Optional[str]]] = pydantic_v1.Field() + tracking_categories: typing.Optional[typing.List[typing.Optional[str]]] = pydantic.Field() """ The line's associated tracking categories. """ - total_line_amount: typing.Optional[str] = pydantic_v1.Field() + total_line_amount: typing.Optional[str] = pydantic.Field() """ The line item's total. """ tax_rate: typing.Optional[str] - currency: typing.Optional[TransactionLineItemCurrency] = pydantic_v1.Field() + currency: typing.Optional[TransactionLineItemCurrency] = pydantic.Field() """ The line item's currency. @@ -387,35 +388,26 @@ class TransactionLineItem(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The line item's exchange rate. """ - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The company the line belongs to. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/validation_problem_source.py b/src/merge/resources/accounting/types/validation_problem_source.py index fde15b40..c65d82ef 100644 --- a/src/merge/resources/accounting/types/validation_problem_source.py +++ b/src/merge/resources/accounting/types/validation_problem_source.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ValidationProblemSource(pydantic_v1.BaseModel): - pointer: str - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class ValidationProblemSource(UniversalBaseModel): + pointer: str - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/vendor_credit.py b/src/merge/resources/accounting/types/vendor_credit.py index 2b8d01b7..6adcbd13 100644 --- a/src/merge/resources/accounting/types/vendor_credit.py +++ b/src/merge/resources/accounting/types/vendor_credit.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData from .vendor_credit_accounting_period import VendorCreditAccountingPeriod from .vendor_credit_company import VendorCreditCompany @@ -14,7 +15,7 @@ from 
.vendor_credit_vendor import VendorCreditVendor -class VendorCredit(pydantic_v1.BaseModel): +class VendorCredit(UniversalBaseModel): """ # The VendorCredit Object @@ -28,42 +29,42 @@ class VendorCredit(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - number: typing.Optional[str] = pydantic_v1.Field() + number: typing.Optional[str] = pydantic.Field() """ The vendor credit's number. """ - transaction_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + transaction_date: typing.Optional[dt.datetime] = pydantic.Field() """ The vendor credit's transaction date. """ - vendor: typing.Optional[VendorCreditVendor] = pydantic_v1.Field() + vendor: typing.Optional[VendorCreditVendor] = pydantic.Field() """ The vendor that owes the gift or refund. """ - total_amount: typing.Optional[float] = pydantic_v1.Field() + total_amount: typing.Optional[float] = pydantic.Field() """ The vendor credit's total amount. """ - currency: typing.Optional[VendorCreditCurrency] = pydantic_v1.Field() + currency: typing.Optional[VendorCreditCurrency] = pydantic.Field() """ The vendor credit's currency. @@ -375,24 +376,24 @@ class VendorCredit(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The vendor credit's exchange rate. """ - company: typing.Optional[VendorCreditCompany] = pydantic_v1.Field() + company: typing.Optional[VendorCreditCompany] = pydantic.Field() """ The company the vendor credit belongs to. """ lines: typing.Optional[typing.List[VendorCreditLine]] tracking_categories: typing.Optional[typing.List[typing.Optional[VendorCreditTrackingCategoriesItem]]] - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ - accounting_period: typing.Optional[VendorCreditAccountingPeriod] = pydantic_v1.Field() + accounting_period: typing.Optional[VendorCreditAccountingPeriod] = pydantic.Field() """ The accounting period that the VendorCredit was generated in. 
""" @@ -400,20 +401,11 @@ class VendorCredit(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/vendor_credit_line.py b/src/merge/resources/accounting/types/vendor_credit_line.py index 7cf9691e..70c41e07 100644 --- a/src/merge/resources/accounting/types/vendor_credit_line.py +++ b/src/merge/resources/accounting/types/vendor_credit_line.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .vendor_credit_line_account import VendorCreditLineAccount -class VendorCreditLine(pydantic_v1.BaseModel): +class VendorCreditLine(UniversalBaseModel): """ # The VendorCreditLine Object @@ -22,75 +23,66 @@ class VendorCreditLine(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - net_amount: typing.Optional[float] = pydantic_v1.Field() + net_amount: typing.Optional[float] = pydantic.Field() """ The full value of the credit. """ - tracking_category: typing.Optional[str] = pydantic_v1.Field() + tracking_category: typing.Optional[str] = pydantic.Field() """ The line's associated tracking category. """ - tracking_categories: typing.Optional[typing.List[typing.Optional[str]]] = pydantic_v1.Field() + tracking_categories: typing.Optional[typing.List[typing.Optional[str]]] = pydantic.Field() """ The line's associated tracking categories. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The line's description. """ - account: typing.Optional[VendorCreditLineAccount] = pydantic_v1.Field() + account: typing.Optional[VendorCreditLineAccount] = pydantic.Field() """ The line's account. 
""" - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The company the line belongs to. """ - exchange_rate: typing.Optional[str] = pydantic_v1.Field() + exchange_rate: typing.Optional[str] = pydantic.Field() """ The vendor credit line item's exchange rate. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/warning_validation_problem.py b/src/merge/resources/accounting/types/warning_validation_problem.py index 6baf9600..348d668a 100644 --- a/src/merge/resources/accounting/types/warning_validation_problem.py +++ b/src/merge/resources/accounting/types/warning_validation_problem.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .validation_problem_source import ValidationProblemSource -class WarningValidationProblem(pydantic_v1.BaseModel): +class WarningValidationProblem(UniversalBaseModel): source: typing.Optional[ValidationProblemSource] title: str detail: str problem_type: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/accounting/types/webhook_receiver.py b/src/merge/resources/accounting/types/webhook_receiver.py index 0544f256..bb10af95 100644 --- a/src/merge/resources/accounting/types/webhook_receiver.py +++ b/src/merge/resources/accounting/types/webhook_receiver.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class WebhookReceiver(pydantic_v1.BaseModel): + +class WebhookReceiver(UniversalBaseModel): event: str is_active: bool key: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/resources/account_details/client.py b/src/merge/resources/ats/resources/account_details/client.py index 314ddb78..94666456 100644 --- a/src/merge/resources/ats/resources/account_details/client.py +++ b/src/merge/resources/ats/resources/account_details/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.account_details import AccountDetails @@ -41,9 +41,9 @@ def retrieve(self, *, request_options: typing.Optional[RequestOptions] = None) - _response = self._client_wrapper.httpx_client.request( "ats/v1/account-details", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountDetails, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountDetails, parse_obj_as(type_=AccountDetails, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -70,20 +70,28 @@ async def retrieve(self, *, request_options: typing.Optional[RequestOptions] = N Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.account_details.retrieve() + + + async def main() -> None: + await client.ats.account_details.retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/account-details", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountDetails, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountDetails, parse_obj_as(type_=AccountDetails, object_=_response.json())) # type: ignore _response_json = _response.json() 
except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/account_token/client.py b/src/merge/resources/ats/resources/account_token/client.py index fb1e8e11..18da4910 100644 --- a/src/merge/resources/ats/resources/account_token/client.py +++ b/src/merge/resources/ats/resources/account_token/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.account_token import AccountToken @@ -46,9 +46,9 @@ def retrieve(self, public_token: str, *, request_options: typing.Optional[Reques _response = self._client_wrapper.httpx_client.request( f"ats/v1/account-token/{jsonable_encoder(public_token)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountToken, parse_obj_as(type_=AccountToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -79,22 +79,30 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.account_token.retrieve( - public_token="public_token", - ) + + + async def main() -> None: + await client.ats.account_token.retrieve( + public_token="public_token", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/account-token/{jsonable_encoder(public_token)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountToken, parse_obj_as(type_=AccountToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/activities/client.py b/src/merge/resources/ats/resources/activities/client.py index b90ac796..9dbc4f67 100644 --- a/src/merge/resources/ats/resources/activities/client.py +++ b/src/merge/resources/ats/resources/activities/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.activity import Activity from ...types.activity_request import ActivityRequest @@ -128,9 +128,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedActivityList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedActivityList, parse_obj_as(type_=PaginatedActivityList, object_=_response.json())) # type: ignore 
_response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -190,9 +190,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ActivityResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ActivityResponse, parse_obj_as(type_=ActivityResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -258,9 +258,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Activity, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Activity, parse_obj_as(type_=Activity, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -293,9 +293,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "ats/v1/activities/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -378,13 +378,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.activities.list() + + + async def main() -> None: + await client.ats.activities.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/activities", @@ -406,9 +414,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedActivityList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedActivityList, parse_obj_as(type_=PaginatedActivityList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -448,6 +456,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ats import ActivityRequest @@ -455,10 +465,16 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.activities.create( - model=ActivityRequest(), - remote_user_id="remote_user_id", - ) + + + async def main() -> None: + await client.ats.activities.create( + model=ActivityRequest(), + remote_user_id="remote_user_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/activities", @@ -468,9 +484,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ActivityResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return 
typing.cast(ActivityResponse, parse_obj_as(type_=ActivityResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -515,15 +531,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.activities.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ats.activities.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/activities/{jsonable_encoder(id)}", @@ -536,9 +560,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Activity, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Activity, parse_obj_as(type_=Activity, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -560,20 +584,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.activities.meta_post_retrieve() + + + async def main() -> None: + await client.ats.activities.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/activities/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/applications/client.py b/src/merge/resources/ats/resources/applications/client.py index d04961ee..561d76fd 100644 --- a/src/merge/resources/ats/resources/applications/client.py +++ b/src/merge/resources/ats/resources/applications/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.application import Application from ...types.application_request import ApplicationRequest @@ -141,9 +141,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedApplicationList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedApplicationList, parse_obj_as(type_=PaginatedApplicationList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -203,9 +203,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(ApplicationResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ApplicationResponse, parse_obj_as(type_=ApplicationResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -258,9 +258,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Application, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Application, parse_obj_as(type_=Application, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -322,9 +322,9 @@ def change_stage_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ApplicationResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ApplicationResponse, parse_obj_as(type_=ApplicationResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -368,9 +368,9 @@ def meta_post_retrieve( params={"application_remote_template_id": application_remote_template_id}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -465,13 +465,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.applications.list() + + + async def main() -> None: + await client.ats.applications.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/applications", @@ -496,9 +504,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedApplicationList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedApplicationList, parse_obj_as(type_=PaginatedApplicationList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -538,6 +546,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ats import ApplicationRequest @@ -545,10 +555,16 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.applications.create( - model=ApplicationRequest(), - remote_user_id="remote_user_id", - ) + + + async def main() -> None: + await client.ats.applications.create( + model=ApplicationRequest(), + remote_user_id="remote_user_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( 
"ats/v1/applications", @@ -558,9 +574,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ApplicationResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ApplicationResponse, parse_obj_as(type_=ApplicationResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -597,15 +613,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.applications.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ats.applications.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/applications/{jsonable_encoder(id)}", @@ -613,9 +637,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Application, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Application, parse_obj_as(type_=Application, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -659,15 +683,23 @@ async def change_stage_create( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.applications.change_stage_create( - id="id", - ) + + + async def main() -> None: + await client.ats.applications.change_stage_create( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/applications/{jsonable_encoder(id)}/change-stage", @@ -677,9 +709,9 @@ async def change_stage_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ApplicationResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ApplicationResponse, parse_obj_as(type_=ApplicationResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -709,13 +741,21 @@ async def meta_post_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.applications.meta_post_retrieve() + + + async def main() -> None: + await client.ats.applications.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/applications/meta/post", @@ -723,9 +763,9 @@ async def meta_post_retrieve( params={"application_remote_template_id": application_remote_template_id}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = 
_response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/async_passthrough/client.py b/src/merge/resources/ats/resources/async_passthrough/client.py index 6dee374e..0bf96415 100644 --- a/src/merge/resources/ats/resources/async_passthrough/client.py +++ b/src/merge/resources/ats/resources/async_passthrough/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.async_passthrough_reciept import AsyncPassthroughReciept from ...types.data_passthrough_request import DataPassthroughRequest @@ -57,9 +57,9 @@ def create( _response = self._client_wrapper.httpx_client.request( "ats/v1/async-passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AsyncPassthroughReciept, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AsyncPassthroughReciept, parse_obj_as(type_=AsyncPassthroughReciept, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -100,9 +100,9 @@ def retrieve( method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -133,6 +133,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ats import DataPassthroughRequest, MethodEnum @@ -140,19 +142,25 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.async_passthrough.create( - request=DataPassthroughRequest( - method=MethodEnum.GET, - path="/scooters", - ), - ) + + + async def main() -> None: + await client.ats.async_passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/async-passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AsyncPassthroughReciept, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AsyncPassthroughReciept, parse_obj_as(type_=AsyncPassthroughReciept, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -178,24 +186,32 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.async_passthrough.retrieve( - async_passthrough_receipt_id="async_passthrough_receipt_id", - ) 
+ + + async def main() -> None: + await client.ats.async_passthrough.retrieve( + async_passthrough_receipt_id="async_passthrough_receipt_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/async-passthrough/{jsonable_encoder(async_passthrough_receipt_id)}", method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/attachments/client.py b/src/merge/resources/ats/resources/attachments/client.py index 11c5393b..4cc890eb 100644 --- a/src/merge/resources/ats/resources/attachments/client.py +++ b/src/merge/resources/ats/resources/attachments/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.attachment import Attachment from ...types.attachment_request import AttachmentRequest @@ -124,9 +124,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAttachmentList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAttachmentList, parse_obj_as(type_=PaginatedAttachmentList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -186,9 +186,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AttachmentResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AttachmentResponse, parse_obj_as(type_=AttachmentResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -254,9 +254,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Attachment, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Attachment, parse_obj_as(type_=Attachment, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -289,9 +289,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "ats/v1/attachments/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = 
_response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -374,13 +374,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.attachments.list() + + + async def main() -> None: + await client.ats.attachments.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/attachments", @@ -402,9 +410,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAttachmentList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAttachmentList, parse_obj_as(type_=PaginatedAttachmentList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -444,6 +452,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ats import AttachmentRequest @@ -451,10 +461,16 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.attachments.create( - model=AttachmentRequest(), - remote_user_id="remote_user_id", - ) + + + async def main() -> None: + await client.ats.attachments.create( + model=AttachmentRequest(), + remote_user_id="remote_user_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/attachments", @@ -464,9 +480,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AttachmentResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AttachmentResponse, parse_obj_as(type_=AttachmentResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -511,15 +527,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.attachments.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ats.attachments.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/attachments/{jsonable_encoder(id)}", @@ -532,9 +556,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Attachment, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Attachment, parse_obj_as(type_=Attachment, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -556,20 +580,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.attachments.meta_post_retrieve() + + + async def main() -> None: + await client.ats.attachments.meta_post_retrieve() + + + asyncio.run(main()) """ 
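
A note on the regenerated async examples in the docstrings above: a bare top-level `await` is only valid inside an async context, so each async snippet now builds the client at module scope and drives the awaited call with `asyncio.run`. A condensed, hypothetical sketch of that pattern, reusing only the `AsyncMerge` constructor arguments and the attachments endpoint shown in this hunk:

    import asyncio

    from merge.client import AsyncMerge

    client = AsyncMerge(
        account_token="YOUR_ACCOUNT_TOKEN",
        api_key="YOUR_API_KEY",
    )


    async def main() -> None:
        # Any awaited SDK call belongs inside the coroutine; asyncio.run
        # creates and closes the event loop around it.
        await client.ats.attachments.meta_post_retrieve()


    asyncio.run(main())
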
_response = await self._client_wrapper.httpx_client.request( "ats/v1/attachments/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/audit_trail/client.py b/src/merge/resources/ats/resources/audit_trail/client.py index 7fe8652e..4197e2d2 100644 --- a/src/merge/resources/ats/resources/audit_trail/client.py +++ b/src/merge/resources/ats/resources/audit_trail/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_audit_log_event_list import PaginatedAuditLogEventList @@ -79,9 +79,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAuditLogEventList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAuditLogEventList, parse_obj_as(type_=PaginatedAuditLogEventList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -136,13 +136,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.audit_trail.list() + + + async def main() -> None: + await client.ats.audit_trail.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/audit-trail", @@ -157,9 +165,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAuditLogEventList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAuditLogEventList, parse_obj_as(type_=PaginatedAuditLogEventList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/available_actions/client.py b/src/merge/resources/ats/resources/available_actions/client.py index fe2f1523..ee567224 100644 --- a/src/merge/resources/ats/resources/available_actions/client.py +++ b/src/merge/resources/ats/resources/available_actions/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.available_actions import AvailableActions @@ -41,9 +41,9 @@ def retrieve(self, *, request_options: typing.Optional[RequestOptions] = None) - _response = self._client_wrapper.httpx_client.request( "ats/v1/available-actions", method="GET", request_options=request_options ) - if 200 
<= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AvailableActions, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AvailableActions, parse_obj_as(type_=AvailableActions, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -70,20 +70,28 @@ async def retrieve(self, *, request_options: typing.Optional[RequestOptions] = N Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.available_actions.retrieve() + + + async def main() -> None: + await client.ats.available_actions.retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/available-actions", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AvailableActions, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AvailableActions, parse_obj_as(type_=AvailableActions, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/candidates/client.py b/src/merge/resources/ats/resources/candidates/client.py index 4f462220..a8067bfc 100644 --- a/src/merge/resources/ats/resources/candidates/client.py +++ b/src/merge/resources/ats/resources/candidates/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.candidate import Candidate from ...types.candidate_request import CandidateRequest @@ -133,9 +133,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedCandidateList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedCandidateList, parse_obj_as(type_=PaginatedCandidateList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -195,9 +195,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CandidateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CandidateResponse, parse_obj_as(type_=CandidateResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -250,9 +250,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Candidate, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Candidate, parse_obj_as(type_=Candidate, object_=_response.json())) # type: ignore _response_json = 
_response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -316,9 +316,9 @@ def partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CandidateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CandidateResponse, parse_obj_as(type_=CandidateResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -371,9 +371,9 @@ def ignore_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -410,9 +410,9 @@ def meta_patch_retrieve(self, id: str, *, request_options: typing.Optional[Reque _response = self._client_wrapper.httpx_client.request( f"ats/v1/candidates/meta/patch/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -445,9 +445,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "ats/v1/candidates/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -534,13 +534,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.candidates.list() + + + async def main() -> None: + await client.ats.candidates.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/candidates", @@ -563,9 +571,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedCandidateList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedCandidateList, parse_obj_as(type_=PaginatedCandidateList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -605,6 +613,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ats import CandidateRequest @@ -612,10 +622,16 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.candidates.create( - model=CandidateRequest(), - remote_user_id="remote_user_id", - ) + + + async def 
main() -> None: + await client.ats.candidates.create( + model=CandidateRequest(), + remote_user_id="remote_user_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/candidates", @@ -625,9 +641,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CandidateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CandidateResponse, parse_obj_as(type_=CandidateResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -664,15 +680,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.candidates.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ats.candidates.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/candidates/{jsonable_encoder(id)}", @@ -680,9 +704,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Candidate, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Candidate, parse_obj_as(type_=Candidate, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -725,6 +749,8 @@ async def partial_update( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ats import PatchedCandidateRequest @@ -732,11 +758,17 @@ async def partial_update( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.candidates.partial_update( - id="id", - model=PatchedCandidateRequest(), - remote_user_id="remote_user_id", - ) + + + async def main() -> None: + await client.ats.candidates.partial_update( + id="id", + model=PatchedCandidateRequest(), + remote_user_id="remote_user_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/candidates/{jsonable_encoder(id)}", @@ -746,9 +778,9 @@ async def partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CandidateResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CandidateResponse, parse_obj_as(type_=CandidateResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -782,6 +814,8 @@ async def ignore_create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ats import ReasonEnum @@ -789,10 +823,16 @@ async def ignore_create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.candidates.ignore_create( - model_id="model_id", - reason=ReasonEnum.GENERAL_CUSTOMER_REQUEST, - ) + + + async def main() -> None: + await client.ats.candidates.ignore_create( + model_id="model_id", + reason=ReasonEnum.GENERAL_CUSTOMER_REQUEST, + ) + + + asyncio.run(main()) 
""" _response = await self._client_wrapper.httpx_client.request( f"ats/v1/candidates/ignore/{jsonable_encoder(model_id)}", @@ -801,9 +841,9 @@ async def ignore_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -829,22 +869,30 @@ async def meta_patch_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.candidates.meta_patch_retrieve( - id="id", - ) + + + async def main() -> None: + await client.ats.candidates.meta_patch_retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/candidates/meta/patch/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -866,20 +914,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.candidates.meta_post_retrieve() + + + async def main() -> None: + await client.ats.candidates.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/candidates/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/delete_account/client.py b/src/merge/resources/ats/resources/delete_account/client.py index 178b5f05..59666c32 100644 --- a/src/merge/resources/ats/resources/delete_account/client.py +++ b/src/merge/resources/ats/resources/delete_account/client.py @@ -38,9 +38,9 @@ def delete(self, *, request_options: typing.Optional[RequestOptions] = None) -> _response = self._client_wrapper.httpx_client.request( "ats/v1/delete-account", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -66,20 +66,28 @@ async def delete(self, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.delete_account.delete() + + + async def main() -> None: + await client.ats.delete_account.delete() + + + asyncio.run(main()) """ 
_response = await self._client_wrapper.httpx_client.request( "ats/v1/delete-account", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/departments/client.py b/src/merge/resources/ats/resources/departments/client.py index 6d7ac2a1..29ec07d0 100644 --- a/src/merge/resources/ats/resources/departments/client.py +++ b/src/merge/resources/ats/resources/departments/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.department import Department from ...types.paginated_department_list import PaginatedDepartmentList @@ -98,9 +98,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedDepartmentList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedDepartmentList, parse_obj_as(type_=PaginatedDepartmentList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -149,9 +149,9 @@ def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Department, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Department, parse_obj_as(type_=Department, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -218,13 +218,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.departments.list() + + + async def main() -> None: + await client.ats.departments.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/departments", @@ -242,9 +250,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedDepartmentList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedDepartmentList, parse_obj_as(type_=PaginatedDepartmentList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -277,15 +285,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.departments.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ats.departments.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( 
f"ats/v1/departments/{jsonable_encoder(id)}", @@ -293,9 +309,9 @@ async def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Department, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Department, parse_obj_as(type_=Department, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/eeocs/client.py b/src/merge/resources/ats/resources/eeocs/client.py index 7e3a1ced..54b787af 100644 --- a/src/merge/resources/ats/resources/eeocs/client.py +++ b/src/merge/resources/ats/resources/eeocs/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.eeoc import Eeoc from ...types.paginated_eeoc_list import PaginatedEeocList @@ -122,9 +122,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedEeocList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedEeocList, parse_obj_as(type_=PaginatedEeocList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -190,9 +190,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Eeoc, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Eeoc, parse_obj_as(type_=Eeoc, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -275,13 +275,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.eeocs.list() + + + async def main() -> None: + await client.ats.eeocs.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/eeocs", @@ -303,9 +311,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedEeocList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedEeocList, parse_obj_as(type_=PaginatedEeocList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -350,15 +358,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.eeocs.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ats.eeocs.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( 
f"ats/v1/eeocs/{jsonable_encoder(id)}", @@ -371,9 +387,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Eeoc, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Eeoc, parse_obj_as(type_=Eeoc, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/field_mapping/client.py b/src/merge/resources/ats/resources/field_mapping/client.py index 0869239c..9e2c3d3c 100644 --- a/src/merge/resources/ats/resources/field_mapping/client.py +++ b/src/merge/resources/ats/resources/field_mapping/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.external_target_field_api_response import ExternalTargetFieldApiResponse from ...types.field_mapping_api_instance_response import FieldMappingApiInstanceResponse @@ -50,9 +50,9 @@ def field_mappings_retrieve( _response = self._client_wrapper.httpx_client.request( "ats/v1/field-mappings", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingApiInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingApiInstanceResponse, parse_obj_as(type_=FieldMappingApiInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -131,9 +131,9 @@ def field_mappings_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -174,9 +174,9 @@ def field_mappings_destroy( method="DELETE", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -238,9 +238,9 @@ def field_mappings_partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise 
ApiError(status_code=_response.status_code, body=_response.text) @@ -288,9 +288,9 @@ def remote_fields_retrieve( params={"common_models": common_models, "include_example_values": include_example_values}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteFieldApiResponse, parse_obj_as(type_=RemoteFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -325,9 +325,9 @@ def target_fields_retrieve( _response = self._client_wrapper.httpx_client.request( "ats/v1/target-fields", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExternalTargetFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ExternalTargetFieldApiResponse, parse_obj_as(type_=ExternalTargetFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -356,20 +356,28 @@ async def field_mappings_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.field_mapping.field_mappings_retrieve() + + + async def main() -> None: + await client.ats.field_mapping.field_mappings_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/field-mappings", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingApiInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingApiInstanceResponse, parse_obj_as(type_=FieldMappingApiInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -419,20 +427,28 @@ async def field_mappings_create( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.field_mapping.field_mappings_create( - target_field_name="example_target_field_name", - target_field_description="this is a example description of the target field", - remote_field_traversal_path=["example_remote_field"], - remote_method="GET", - remote_url_path="/example-url-path", - common_model_name="ExampleCommonModel", - ) + + + async def main() -> None: + await client.ats.field_mapping.field_mappings_create( + target_field_name="example_target_field_name", + target_field_description="this is a example description of the target field", + remote_field_traversal_path=["example_remote_field"], + remote_method="GET", + remote_url_path="/example-url-path", + common_model_name="ExampleCommonModel", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/field-mappings", @@ -448,9 +464,9 @@ async def field_mappings_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -476,24 +492,32 @@ async def field_mappings_destroy( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.field_mapping.field_mappings_destroy( - field_mapping_id="field_mapping_id", - ) + + + async def main() -> None: + await client.ats.field_mapping.field_mappings_destroy( + field_mapping_id="field_mapping_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/field-mappings/{jsonable_encoder(field_mapping_id)}", method="DELETE", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -534,15 +558,23 @@ async def field_mappings_partial_update( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.field_mapping.field_mappings_partial_update( - field_mapping_id="field_mapping_id", - ) + + + async def main() -> None: + await client.ats.field_mapping.field_mappings_partial_update( + field_mapping_id="field_mapping_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/field-mappings/{jsonable_encoder(field_mapping_id)}", @@ -555,9 +587,9 @@ async def field_mappings_partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -591,13 +623,21 @@ async def remote_fields_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.field_mapping.remote_fields_retrieve() + + + async def main() -> None: + await client.ats.field_mapping.remote_fields_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/remote-fields", @@ -605,9 +645,9 @@ async def remote_fields_retrieve( params={"common_models": common_models, "include_example_values": include_example_values}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return 
typing.cast(RemoteFieldApiResponse, parse_obj_as(type_=RemoteFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -631,20 +671,28 @@ async def target_fields_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.field_mapping.target_fields_retrieve() + + + async def main() -> None: + await client.ats.field_mapping.target_fields_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/target-fields", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExternalTargetFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ExternalTargetFieldApiResponse, parse_obj_as(type_=ExternalTargetFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/force_resync/client.py b/src/merge/resources/ats/resources/force_resync/client.py index 7d89e48e..f9fa78c5 100644 --- a/src/merge/resources/ats/resources/force_resync/client.py +++ b/src/merge/resources/ats/resources/force_resync/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.sync_status import SyncStatus @@ -43,9 +43,9 @@ def sync_status_resync_create( _response = self._client_wrapper.httpx_client.request( "ats/v1/sync-status/resync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[SyncStatus], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[SyncStatus], parse_obj_as(type_=typing.List[SyncStatus], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -74,20 +74,28 @@ async def sync_status_resync_create( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.force_resync.sync_status_resync_create() + + + async def main() -> None: + await client.ats.force_resync.sync_status_resync_create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/sync-status/resync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[SyncStatus], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[SyncStatus], parse_obj_as(type_=typing.List[SyncStatus], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/generate_key/client.py 
b/src/merge/resources/ats/resources/generate_key/client.py index d831f4e4..76bc2c83 100644 --- a/src/merge/resources/ats/resources/generate_key/client.py +++ b/src/merge/resources/ats/resources/generate_key/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.remote_key import RemoteKey @@ -49,9 +49,9 @@ def create(self, *, name: str, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "ats/v1/generate-key", method="POST", json={"name": name}, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -81,22 +81,30 @@ async def create(self, *, name: str, request_options: typing.Optional[RequestOpt Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.generate_key.create( - name="Remote Deployment Key 1", - ) + + + async def main() -> None: + await client.ats.generate_key.create( + name="Remote Deployment Key 1", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/generate-key", method="POST", json={"name": name}, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/interviews/client.py b/src/merge/resources/ats/resources/interviews/client.py index c8bd88fd..a33d7b12 100644 --- a/src/merge/resources/ats/resources/interviews/client.py +++ b/src/merge/resources/ats/resources/interviews/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.meta_response import MetaResponse from ...types.paginated_scheduled_interview_list import PaginatedScheduledInterviewList @@ -141,9 +141,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedScheduledInterviewList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedScheduledInterviewList, parse_obj_as(type_=PaginatedScheduledInterviewList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, 
body=_response.text) @@ -203,9 +203,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ScheduledInterviewResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ScheduledInterviewResponse, parse_obj_as(type_=ScheduledInterviewResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -271,9 +271,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ScheduledInterview, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ScheduledInterview, parse_obj_as(type_=ScheduledInterview, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -306,9 +306,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "ats/v1/interviews/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -403,13 +403,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.interviews.list() + + + async def main() -> None: + await client.ats.interviews.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/interviews", @@ -434,9 +442,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedScheduledInterviewList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedScheduledInterviewList, parse_obj_as(type_=PaginatedScheduledInterviewList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -476,6 +484,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ats import ScheduledInterviewRequest @@ -483,10 +493,16 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.interviews.create( - model=ScheduledInterviewRequest(), - remote_user_id="remote_user_id", - ) + + + async def main() -> None: + await client.ats.interviews.create( + model=ScheduledInterviewRequest(), + remote_user_id="remote_user_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/interviews", @@ -496,9 +512,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ScheduledInterviewResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return 
typing.cast(ScheduledInterviewResponse, parse_obj_as(type_=ScheduledInterviewResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -543,15 +559,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.interviews.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ats.interviews.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/interviews/{jsonable_encoder(id)}", @@ -564,9 +588,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ScheduledInterview, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ScheduledInterview, parse_obj_as(type_=ScheduledInterview, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -588,20 +612,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.interviews.meta_post_retrieve() + + + async def main() -> None: + await client.ats.interviews.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/interviews/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/issues/client.py b/src/merge/resources/ats/resources/issues/client.py index e907d702..c2ec3182 100644 --- a/src/merge/resources/ats/resources/issues/client.py +++ b/src/merge/resources/ats/resources/issues/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.issue import Issue from ...types.paginated_issue_list import PaginatedIssueList @@ -127,9 +127,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedIssueList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedIssueList, parse_obj_as(type_=PaginatedIssueList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -166,9 +166,9 @@ def retrieve(self, id: str, *, request_options: typing.Optional[RequestOptions] _response = 
self._client_wrapper.httpx_client.request( f"ats/v1/issues/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Issue, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Issue, parse_obj_as(type_=Issue, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -251,13 +251,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.issues.list() + + + async def main() -> None: + await client.ats.issues.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/issues", @@ -287,9 +295,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedIssueList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedIssueList, parse_obj_as(type_=PaginatedIssueList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -313,22 +321,30 @@ async def retrieve(self, id: str, *, request_options: typing.Optional[RequestOpt Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.issues.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ats.issues.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/issues/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Issue, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Issue, parse_obj_as(type_=Issue, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/job_interview_stages/client.py b/src/merge/resources/ats/resources/job_interview_stages/client.py index 68d266d1..124ac8af 100644 --- a/src/merge/resources/ats/resources/job_interview_stages/client.py +++ b/src/merge/resources/ats/resources/job_interview_stages/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.job_interview_stage import JobInterviewStage from ...types.paginated_job_interview_stage_list import PaginatedJobInterviewStageList @@ -108,9 +108,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedJobInterviewStageList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedJobInterviewStageList, 
parse_obj_as(type_=PaginatedJobInterviewStageList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -163,9 +163,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(JobInterviewStage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(JobInterviewStage, parse_obj_as(type_=JobInterviewStage, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -240,13 +240,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.job_interview_stages.list() + + + async def main() -> None: + await client.ats.job_interview_stages.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/job-interview-stages", @@ -266,9 +274,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedJobInterviewStageList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedJobInterviewStageList, parse_obj_as(type_=PaginatedJobInterviewStageList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -305,15 +313,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.job_interview_stages.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ats.job_interview_stages.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/job-interview-stages/{jsonable_encoder(id)}", @@ -321,9 +337,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(JobInterviewStage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(JobInterviewStage, parse_obj_as(type_=JobInterviewStage, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/job_postings/client.py b/src/merge/resources/ats/resources/job_postings/client.py index f0ab99ae..f8fc3bac 100644 --- a/src/merge/resources/ats/resources/job_postings/client.py +++ b/src/merge/resources/ats/resources/job_postings/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.job_posting import JobPosting from 
...types.paginated_job_posting_list import PaginatedJobPostingList @@ -115,9 +115,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedJobPostingList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedJobPostingList, parse_obj_as(type_=PaginatedJobPostingList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -170,9 +170,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(JobPosting, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(JobPosting, parse_obj_as(type_=JobPosting, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -253,13 +253,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.job_postings.list() + + + async def main() -> None: + await client.ats.job_postings.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/job-postings", @@ -279,9 +287,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedJobPostingList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedJobPostingList, parse_obj_as(type_=PaginatedJobPostingList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -318,15 +326,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.job_postings.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ats.job_postings.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/job-postings/{jsonable_encoder(id)}", @@ -334,9 +350,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(JobPosting, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(JobPosting, parse_obj_as(type_=JobPosting, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/jobs/client.py b/src/merge/resources/ats/resources/jobs/client.py index 8b454c6b..ef1c0361 100644 --- a/src/merge/resources/ats/resources/jobs/client.py +++ b/src/merge/resources/ats/resources/jobs/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from 
.....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.job import Job from ...types.paginated_job_list import PaginatedJobList @@ -139,9 +139,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedJobList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedJobList, parse_obj_as(type_=PaginatedJobList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -207,9 +207,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Job, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Job, parse_obj_as(type_=Job, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -280,9 +280,9 @@ def screening_questions_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedScreeningQuestionList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedScreeningQuestionList, parse_obj_as(type_=PaginatedScreeningQuestionList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -379,13 +379,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.jobs.list() + + + async def main() -> None: + await client.ats.jobs.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/jobs", @@ -409,9 +417,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedJobList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedJobList, parse_obj_as(type_=PaginatedJobList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -456,15 +464,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.jobs.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ats.jobs.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/jobs/{jsonable_encoder(id)}", @@ -477,9 +493,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Job, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Job, parse_obj_as(type_=Job, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -528,15 +544,23 @@ 
async def screening_questions_list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.jobs.screening_questions_list( - job_id="job_id", - ) + + + async def main() -> None: + await client.ats.jobs.screening_questions_list( + job_id="job_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/jobs/{jsonable_encoder(job_id)}/screening-questions", @@ -550,9 +574,9 @@ async def screening_questions_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedScreeningQuestionList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedScreeningQuestionList, parse_obj_as(type_=PaginatedScreeningQuestionList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/link_token/client.py b/src/merge/resources/ats/resources/link_token/client.py index 60a30776..0e7aecca 100644 --- a/src/merge/resources/ats/resources/link_token/client.py +++ b/src/merge/resources/ats/resources/link_token/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.categories_enum import CategoriesEnum from ...types.common_model_scopes_body_request import CommonModelScopesBodyRequest @@ -119,9 +119,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LinkToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(LinkToken, parse_obj_as(type_=LinkToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -198,6 +198,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ats import CategoriesEnum @@ -205,12 +207,18 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.link_token.create( - end_user_email_address="example@gmail.com", - end_user_organization_name="Test Organization", - end_user_origin_id="12345", - categories=[CategoriesEnum.HRIS, CategoriesEnum.ATS], - ) + + + async def main() -> None: + await client.ats.link_token.create( + end_user_email_address="example@gmail.com", + end_user_organization_name="Test Organization", + end_user_origin_id="12345", + categories=[CategoriesEnum.HRIS, CategoriesEnum.ATS], + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/link-token", @@ -231,9 +239,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LinkToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(LinkToken, parse_obj_as(type_=LinkToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise 
ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/linked_accounts/client.py b/src/merge/resources/ats/resources/linked_accounts/client.py index 4a016f17..94681fff 100644 --- a/src/merge/resources/ats/resources/linked_accounts/client.py +++ b/src/merge/resources/ats/resources/linked_accounts/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_account_details_and_actions_list import PaginatedAccountDetailsAndActionsList from .types.linked_accounts_list_request_category import LinkedAccountsListRequestCategory @@ -122,9 +122,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountDetailsAndActionsList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountDetailsAndActionsList, parse_obj_as(type_=PaginatedAccountDetailsAndActionsList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -214,13 +214,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.linked_accounts.list() + + + async def main() -> None: + await client.ats.linked_accounts.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/linked-accounts", @@ -242,9 +250,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountDetailsAndActionsList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountDetailsAndActionsList, parse_obj_as(type_=PaginatedAccountDetailsAndActionsList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/offers/client.py b/src/merge/resources/ats/resources/offers/client.py index c5d8dd52..83b6ace5 100644 --- a/src/merge/resources/ats/resources/offers/client.py +++ b/src/merge/resources/ats/resources/offers/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.offer import Offer from ...types.paginated_offer_list import PaginatedOfferList @@ -125,9 +125,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedOfferList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedOfferList, parse_obj_as(type_=PaginatedOfferList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise 
ApiError(status_code=_response.status_code, body=_response.text) @@ -193,9 +193,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Offer, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Offer, parse_obj_as(type_=Offer, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -282,13 +282,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.offers.list() + + + async def main() -> None: + await client.ats.offers.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/offers", @@ -311,9 +319,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedOfferList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedOfferList, parse_obj_as(type_=PaginatedOfferList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -358,15 +366,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.offers.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ats.offers.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/offers/{jsonable_encoder(id)}", @@ -379,9 +395,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Offer, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Offer, parse_obj_as(type_=Offer, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/offices/client.py b/src/merge/resources/ats/resources/offices/client.py index 08813066..fd9e831b 100644 --- a/src/merge/resources/ats/resources/offices/client.py +++ b/src/merge/resources/ats/resources/offices/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.office import Office from ...types.paginated_office_list import PaginatedOfficeList @@ -98,9 +98,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedOfficeList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedOfficeList, parse_obj_as(type_=PaginatedOfficeList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, 
body=_response.text) @@ -149,9 +149,9 @@ def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Office, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Office, parse_obj_as(type_=Office, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -218,13 +218,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.offices.list() + + + async def main() -> None: + await client.ats.offices.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/offices", @@ -242,9 +250,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedOfficeList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedOfficeList, parse_obj_as(type_=PaginatedOfficeList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -277,15 +285,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.offices.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ats.offices.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/offices/{jsonable_encoder(id)}", @@ -293,9 +309,9 @@ async def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Office, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Office, parse_obj_as(type_=Office, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/passthrough/client.py b/src/merge/resources/ats/resources/passthrough/client.py index a04e3f4c..c09558b4 100644 --- a/src/merge/resources/ats/resources/passthrough/client.py +++ b/src/merge/resources/ats/resources/passthrough/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.data_passthrough_request import DataPassthroughRequest from ...types.remote_response import RemoteResponse @@ -55,9 +55,9 @@ def create( _response = self._client_wrapper.httpx_client.request( "ats/v1/passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, 
object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -88,6 +88,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ats import DataPassthroughRequest, MethodEnum @@ -95,19 +97,25 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.passthrough.create( - request=DataPassthroughRequest( - method=MethodEnum.GET, - path="/scooters", - ), - ) + + + async def main() -> None: + await client.ats.passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/regenerate_key/client.py b/src/merge/resources/ats/resources/regenerate_key/client.py index cb0739d9..8ae50f8a 100644 --- a/src/merge/resources/ats/resources/regenerate_key/client.py +++ b/src/merge/resources/ats/resources/regenerate_key/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.remote_key import RemoteKey @@ -49,9 +49,9 @@ def create(self, *, name: str, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "ats/v1/regenerate-key", method="POST", json={"name": name}, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -81,22 +81,30 @@ async def create(self, *, name: str, request_options: typing.Optional[RequestOpt Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.regenerate_key.create( - name="Remote Deployment Key 1", - ) + + + async def main() -> None: + await client.ats.regenerate_key.create( + name="Remote Deployment Key 1", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/regenerate-key", method="POST", json={"name": name}, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = 
_response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/reject_reasons/client.py b/src/merge/resources/ats/resources/reject_reasons/client.py index af3b42a5..12f9cabe 100644 --- a/src/merge/resources/ats/resources/reject_reasons/client.py +++ b/src/merge/resources/ats/resources/reject_reasons/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_reject_reason_list import PaginatedRejectReasonList from ...types.reject_reason import RejectReason @@ -98,9 +98,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRejectReasonList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRejectReasonList, parse_obj_as(type_=PaginatedRejectReasonList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -149,9 +149,9 @@ def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RejectReason, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RejectReason, parse_obj_as(type_=RejectReason, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -218,13 +218,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.reject_reasons.list() + + + async def main() -> None: + await client.ats.reject_reasons.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/reject-reasons", @@ -242,9 +250,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRejectReasonList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRejectReasonList, parse_obj_as(type_=PaginatedRejectReasonList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -277,15 +285,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.reject_reasons.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ats.reject_reasons.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/reject-reasons/{jsonable_encoder(id)}", @@ -293,9 +309,9 @@ async def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(RejectReason, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RejectReason, parse_obj_as(type_=RejectReason, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/scopes/client.py b/src/merge/resources/ats/resources/scopes/client.py index c695d9dc..bc171728 100644 --- a/src/merge/resources/ats/resources/scopes/client.py +++ b/src/merge/resources/ats/resources/scopes/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.common_model_scope_api import CommonModelScopeApi from ...types.individual_common_model_scope_deserializer_request import IndividualCommonModelScopeDeserializerRequest @@ -47,9 +47,9 @@ def default_scopes_retrieve( _response = self._client_wrapper.httpx_client.request( "ats/v1/default-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -84,9 +84,9 @@ def linked_account_scopes_retrieve( _response = self._client_wrapper.httpx_client.request( "ats/v1/linked-account-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -157,9 +157,9 @@ def linked_account_scopes_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -188,20 +188,28 @@ async def default_scopes_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.scopes.default_scopes_retrieve() + + + async def main() -> None: + await client.ats.scopes.default_scopes_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/default-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return 
typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -225,20 +233,28 @@ async def linked_account_scopes_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.scopes.linked_account_scopes_retrieve() + + + async def main() -> None: + await client.ats.scopes.linked_account_scopes_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/linked-account-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -268,6 +284,8 @@ async def linked_account_scopes_create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ats import ( IndividualCommonModelScopeDeserializerRequest, @@ -278,29 +296,35 @@ async def linked_account_scopes_create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.scopes.linked_account_scopes_create( - common_models=[ - IndividualCommonModelScopeDeserializerRequest( - model_name="Employee", - model_permissions={ - "READ": ModelPermissionDeserializerRequest( - is_enabled=True, - ), - "WRITE": ModelPermissionDeserializerRequest( - is_enabled=False, - ), - }, - ), - IndividualCommonModelScopeDeserializerRequest( - model_name="Benefit", - model_permissions={ - "WRITE": ModelPermissionDeserializerRequest( - is_enabled=False, - ) - }, - ), - ], - ) + + + async def main() -> None: + await client.ats.scopes.linked_account_scopes_create( + common_models=[ + IndividualCommonModelScopeDeserializerRequest( + model_name="Employee", + model_permissions={ + "READ": ModelPermissionDeserializerRequest( + is_enabled=True, + ), + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ), + }, + ), + IndividualCommonModelScopeDeserializerRequest( + model_name="Benefit", + model_permissions={ + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ) + }, + ), + ], + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/linked-account-scopes", @@ -309,9 +333,9 @@ async def linked_account_scopes_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/scorecards/client.py b/src/merge/resources/ats/resources/scorecards/client.py index caa59646..629a995a 100644 --- a/src/merge/resources/ats/resources/scorecards/client.py +++ b/src/merge/resources/ats/resources/scorecards/client.py @@ -8,7 +8,7 @@ from 
.....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_scorecard_list import PaginatedScorecardList from ...types.scorecard import Scorecard @@ -130,9 +130,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedScorecardList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedScorecardList, parse_obj_as(type_=PaginatedScorecardList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -198,9 +198,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Scorecard, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Scorecard, parse_obj_as(type_=Scorecard, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -291,13 +291,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.scorecards.list() + + + async def main() -> None: + await client.ats.scorecards.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/scorecards", @@ -321,9 +329,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedScorecardList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedScorecardList, parse_obj_as(type_=PaginatedScorecardList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -368,15 +376,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.scorecards.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ats.scorecards.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ats/v1/scorecards/{jsonable_encoder(id)}", @@ -389,9 +405,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Scorecard, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Scorecard, parse_obj_as(type_=Scorecard, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/sync_status/client.py b/src/merge/resources/ats/resources/sync_status/client.py index 9892adb4..3df45c5f 100644 --- a/src/merge/resources/ats/resources/sync_status/client.py +++ 
b/src/merge/resources/ats/resources/sync_status/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_sync_status_list import PaginatedSyncStatusList @@ -56,9 +56,9 @@ def list( params={"cursor": cursor, "page_size": page_size}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedSyncStatusList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedSyncStatusList, parse_obj_as(type_=PaginatedSyncStatusList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -97,13 +97,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.sync_status.list() + + + async def main() -> None: + await client.ats.sync_status.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/sync-status", @@ -111,9 +119,9 @@ async def list( params={"cursor": cursor, "page_size": page_size}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedSyncStatusList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedSyncStatusList, parse_obj_as(type_=PaginatedSyncStatusList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ats/resources/tags/client.py b/src/merge/resources/ats/resources/tags/client.py index dc195a35..8a47eef3 100644 --- a/src/merge/resources/ats/resources/tags/client.py +++ b/src/merge/resources/ats/resources/tags/client.py @@ -7,7 +7,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_tag_list import PaginatedTagList @@ -96,9 +96,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTagList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTagList, parse_obj_as(type_=PaginatedTagList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -165,13 +165,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ats.tags.list() + + + async def main() -> None: + await client.ats.tags.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ats/v1/tags", @@ -189,9 +197,9 @@ async def list( }, 
            request_options=request_options,
        )
-        if 200 <= _response.status_code < 300:
-            return pydantic_v1.parse_obj_as(PaginatedTagList, _response.json())  # type: ignore
         try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(PaginatedTagList, parse_obj_as(type_=PaginatedTagList, object_=_response.json()))  # type: ignore
             _response_json = _response.json()
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
diff --git a/src/merge/resources/ats/resources/users/client.py b/src/merge/resources/ats/resources/users/client.py
index f3beb83d..7076526c 100644
--- a/src/merge/resources/ats/resources/users/client.py
+++ b/src/merge/resources/ats/resources/users/client.py
@@ -8,7 +8,7 @@
 from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
 from .....core.datetime_utils import serialize_datetime
 from .....core.jsonable_encoder import jsonable_encoder
-from .....core.pydantic_utilities import pydantic_v1
+from .....core.pydantic_utilities import parse_obj_as
 from .....core.request_options import RequestOptions
 from ...types.paginated_remote_user_list import PaginatedRemoteUserList
 from ...types.remote_user import RemoteUser
@@ -113,9 +113,9 @@ def list(
             },
             request_options=request_options,
         )
-        if 200 <= _response.status_code < 300:
-            return pydantic_v1.parse_obj_as(PaginatedRemoteUserList, _response.json())  # type: ignore
         try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(PaginatedRemoteUserList, parse_obj_as(type_=PaginatedRemoteUserList, object_=_response.json()))  # type: ignore
             _response_json = _response.json()
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
@@ -176,9 +176,9 @@ def retrieve(
             },
             request_options=request_options,
         )
-        if 200 <= _response.status_code < 300:
-            return pydantic_v1.parse_obj_as(RemoteUser, _response.json())  # type: ignore
         try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(RemoteUser, parse_obj_as(type_=RemoteUser, object_=_response.json()))  # type: ignore
             _response_json = _response.json()
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
@@ -257,13 +257,21 @@
 
         Examples
         --------
+        import asyncio
+
         from merge.client import AsyncMerge
 
         client = AsyncMerge(
             account_token="YOUR_ACCOUNT_TOKEN",
             api_key="YOUR_API_KEY",
         )
-        await client.ats.users.list()
+
+
+        async def main() -> None:
+            await client.ats.users.list()
+
+
+        asyncio.run(main())
         """
         _response = await self._client_wrapper.httpx_client.request(
             "ats/v1/users",
@@ -284,9 +292,9 @@
             },
             request_options=request_options,
         )
-        if 200 <= _response.status_code < 300:
-            return pydantic_v1.parse_obj_as(PaginatedRemoteUserList, _response.json())  # type: ignore
         try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(PaginatedRemoteUserList, parse_obj_as(type_=PaginatedRemoteUserList, object_=_response.json()))  # type: ignore
             _response_json = _response.json()
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
@@ -327,15 +335,23 @@
 
         Examples
         --------
+        import asyncio
+
         from merge.client import AsyncMerge
 
         client = AsyncMerge(
             account_token="YOUR_ACCOUNT_TOKEN",
             api_key="YOUR_API_KEY",
         )
-        await client.ats.users.retrieve(
-            id="id",
-        )
+
+
+        async def main() -> None:
+            await client.ats.users.retrieve(
+                id="id",
+            )
+
+
+        asyncio.run(main())
         """
         _response = await self._client_wrapper.httpx_client.request(
             f"ats/v1/users/{jsonable_encoder(id)}",
@@ -347,9 +363,9 @@ async def retrieve(
             },
             request_options=request_options,
         )
-        if 200 <= _response.status_code < 300:
-            return pydantic_v1.parse_obj_as(RemoteUser, _response.json())  # type: ignore
         try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(RemoteUser, parse_obj_as(type_=RemoteUser, object_=_response.json()))  # type: ignore
             _response_json = _response.json()
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
diff --git a/src/merge/resources/ats/resources/webhook_receivers/client.py b/src/merge/resources/ats/resources/webhook_receivers/client.py
index f4db28e5..9d3e63ff 100644
--- a/src/merge/resources/ats/resources/webhook_receivers/client.py
+++ b/src/merge/resources/ats/resources/webhook_receivers/client.py
@@ -5,7 +5,7 @@
 
 from .....core.api_error import ApiError
 from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
-from .....core.pydantic_utilities import pydantic_v1
+from .....core.pydantic_utilities import parse_obj_as
 from .....core.request_options import RequestOptions
 from ...types.webhook_receiver import WebhookReceiver
@@ -44,9 +44,9 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty
         _response = self._client_wrapper.httpx_client.request(
             "ats/v1/webhook-receivers", method="GET", request_options=request_options
         )
-        if 200 <= _response.status_code < 300:
-            return pydantic_v1.parse_obj_as(typing.List[WebhookReceiver], _response.json())  # type: ignore
         try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(typing.List[WebhookReceiver], parse_obj_as(type_=typing.List[WebhookReceiver], object_=_response.json()))  # type: ignore
             _response_json = _response.json()
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
@@ -99,9 +99,9 @@ def create(
             request_options=request_options,
             omit=OMIT,
         )
-        if 200 <= _response.status_code < 300:
-            return pydantic_v1.parse_obj_as(WebhookReceiver, _response.json())  # type: ignore
         try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(WebhookReceiver, parse_obj_as(type_=WebhookReceiver, object_=_response.json()))  # type: ignore
             _response_json = _response.json()
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
@@ -128,20 +128,28 @@ async def list(self, *, request_options: typing.Optional[RequestOptions] = None)
 
         Examples
         --------
+        import asyncio
+
         from merge.client import AsyncMerge
 
         client = AsyncMerge(
             account_token="YOUR_ACCOUNT_TOKEN",
             api_key="YOUR_API_KEY",
         )
-        await client.ats.webhook_receivers.list()
+
+
+        async def main() -> None:
+            await client.ats.webhook_receivers.list()
+
+
+        asyncio.run(main())
         """
         _response = await self._client_wrapper.httpx_client.request(
             "ats/v1/webhook-receivers", method="GET", request_options=request_options
         )
-        if 200 <= _response.status_code < 300:
-            return pydantic_v1.parse_obj_as(typing.List[WebhookReceiver], _response.json())  # type: ignore
         try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(typing.List[WebhookReceiver], parse_obj_as(type_=typing.List[WebhookReceiver], object_=_response.json()))  # type: ignore
             _response_json = _response.json()
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
@@ -176,16 +184,24 @@ async def create(
 
         Examples
         --------
+        import asyncio
+
         from merge.client import AsyncMerge
 
         client = AsyncMerge(
             account_token="YOUR_ACCOUNT_TOKEN",
             api_key="YOUR_API_KEY",
         )
-        await client.ats.webhook_receivers.create(
-            event="event",
-            is_active=True,
-        )
+
+
+        async def main() -> None:
+            await client.ats.webhook_receivers.create(
+                event="event",
+                is_active=True,
+            )
+
+
+        asyncio.run(main())
         """
         _response = await self._client_wrapper.httpx_client.request(
             "ats/v1/webhook-receivers",
@@ -194,9 +210,9 @@ async def create(
             request_options=request_options,
             omit=OMIT,
         )
-        if 200 <= _response.status_code < 300:
-            return pydantic_v1.parse_obj_as(WebhookReceiver, _response.json())  # type: ignore
         try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(WebhookReceiver, parse_obj_as(type_=WebhookReceiver, object_=_response.json()))  # type: ignore
             _response_json = _response.json()
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
diff --git a/src/merge/resources/ats/types/account_details.py b/src/merge/resources/ats/types/account_details.py
index 81f9587b..58cd348a 100644
--- a/src/merge/resources/ats/types/account_details.py
+++ b/src/merge/resources/ats/types/account_details.py
@@ -1,14 +1,14 @@
 # This file was auto-generated by Fern from our API Definition.
 
-import datetime as dt
 import typing
 
-from ....core.datetime_utils import serialize_datetime
-from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
+import pydantic
+
+from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
 from .category_enum import CategoryEnum
 
 
-class AccountDetails(pydantic_v1.BaseModel):
+class AccountDetails(UniversalBaseModel):
     id: typing.Optional[str]
     integration: typing.Optional[str]
     integration_slug: typing.Optional[str]
@@ -18,27 +18,18 @@ class AccountDetails(pydantic_v1.BaseModel):
     end_user_email_address: typing.Optional[str]
     status: typing.Optional[str]
     webhook_listener_url: typing.Optional[str]
-    is_duplicate: typing.Optional[bool] = pydantic_v1.Field()
+    is_duplicate: typing.Optional[bool] = pydantic.Field()
     """
     Whether a Production Linked Account's credentials match another existing Production Linked Account. This field is `null` for Test Linked Accounts, incomplete Production Linked Accounts, and ignored duplicate Production Linked Account sets.
""" account_type: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/account_details_and_actions.py b/src/merge/resources/ats/types/account_details_and_actions.py index 6a1b2874..ffc38f58 100644 --- a/src/merge/resources/ats/types/account_details_and_actions.py +++ b/src/merge/resources/ats/types/account_details_and_actions.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_details_and_actions_integration import AccountDetailsAndActionsIntegration from .account_details_and_actions_status_enum import AccountDetailsAndActionsStatusEnum from .category_enum import CategoryEnum -class AccountDetailsAndActions(pydantic_v1.BaseModel): +class AccountDetailsAndActions(UniversalBaseModel): """ # The LinkedAccount Object @@ -30,13 +30,13 @@ class AccountDetailsAndActions(pydantic_v1.BaseModel): end_user_origin_id: typing.Optional[str] end_user_organization_name: str end_user_email_address: str - subdomain: typing.Optional[str] = pydantic_v1.Field() + subdomain: typing.Optional[str] = pydantic.Field() """ The tenant or domain the customer has provided access to. """ webhook_listener_url: str - is_duplicate: typing.Optional[bool] = pydantic_v1.Field() + is_duplicate: typing.Optional[bool] = pydantic.Field() """ Whether a Production Linked Account's credentials match another existing Production Linked Account. This field is `null` for Test Linked Accounts, incomplete Production Linked Accounts, and ignored duplicate Production Linked Account sets. 
""" @@ -44,20 +44,11 @@ class AccountDetailsAndActions(pydantic_v1.BaseModel): integration: typing.Optional[AccountDetailsAndActionsIntegration] account_type: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/account_details_and_actions_integration.py b/src/merge/resources/ats/types/account_details_and_actions_integration.py index 7c300a2b..8aa63726 100644 --- a/src/merge/resources/ats/types/account_details_and_actions_integration.py +++ b/src/merge/resources/ats/types/account_details_and_actions_integration.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .categories_enum import CategoriesEnum from .model_operation import ModelOperation -class AccountDetailsAndActionsIntegration(pydantic_v1.BaseModel): +class AccountDetailsAndActionsIntegration(UniversalBaseModel): name: str categories: typing.List[CategoriesEnum] image: typing.Optional[str] @@ -19,20 +19,11 @@ class AccountDetailsAndActionsIntegration(pydantic_v1.BaseModel): passthrough_available: bool available_model_operations: typing.Optional[typing.List[ModelOperation]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/account_integration.py b/src/merge/resources/ats/types/account_integration.py index 57006e35..7a8a27ab 100644 --- a/src/merge/resources/ats/types/account_integration.py +++ 
b/src/merge/resources/ats/types/account_integration.py @@ -1,69 +1,60 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .categories_enum import CategoriesEnum -class AccountIntegration(pydantic_v1.BaseModel): - name: str = pydantic_v1.Field() +class AccountIntegration(UniversalBaseModel): + name: str = pydantic.Field() """ Company name. """ - categories: typing.Optional[typing.List[CategoriesEnum]] = pydantic_v1.Field() + categories: typing.Optional[typing.List[CategoriesEnum]] = pydantic.Field() """ Category or categories this integration belongs to. Multiple categories should be comma separated, i.e. [ats, hris]. """ - image: typing.Optional[str] = pydantic_v1.Field() + image: typing.Optional[str] = pydantic.Field() """ Company logo in rectangular shape. Upload an image with a clear background. """ - square_image: typing.Optional[str] = pydantic_v1.Field() + square_image: typing.Optional[str] = pydantic.Field() """ Company logo in square shape. Upload an image with a white background. """ - color: typing.Optional[str] = pydantic_v1.Field() + color: typing.Optional[str] = pydantic.Field() """ The color of this integration used for buttons and text throughout the app and landing pages. Choose a darker, saturated color. """ slug: typing.Optional[str] - api_endpoints_to_documentation_urls: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + api_endpoints_to_documentation_urls: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ Mapping of API endpoints to documentation urls for support. Example: {'GET': [['/common-model-scopes', 'https://docs.merge.dev/accounting/common-model-scopes/#common_model_scopes_retrieve'],['/common-model-actions', 'https://docs.merge.dev/accounting/common-model-actions/#common_model_actions_retrieve']], 'POST': []} """ - webhook_setup_guide_url: typing.Optional[str] = pydantic_v1.Field() + webhook_setup_guide_url: typing.Optional[str] = pydantic.Field() """ Setup guide URL for third party webhook creation. Exposed in Merge Docs. """ - category_beta_status: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + category_beta_status: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ Category or categories this integration is in beta status for. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/account_token.py b/src/merge/resources/ats/types/account_token.py index 4794fea3..c280c7cb 100644 --- a/src/merge/resources/ats/types/account_token.py +++ b/src/merge/resources/ats/types/account_token.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_integration import AccountIntegration -class AccountToken(pydantic_v1.BaseModel): +class AccountToken(UniversalBaseModel): account_token: str integration: AccountIntegration - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/activity.py b/src/merge/resources/ats/types/activity.py index a75c372c..db6b651b 100644 --- a/src/merge/resources/ats/types/activity.py +++ b/src/merge/resources/ats/types/activity.py @@ -3,15 +3,16 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .activity_activity_type import ActivityActivityType from .activity_user import ActivityUser from .activity_visibility import ActivityVisibility from .remote_data import RemoteData -class Activity(pydantic_v1.BaseModel): +class Activity(UniversalBaseModel): """ # The 
Activity Object @@ -25,32 +26,32 @@ class Activity(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - user: typing.Optional[ActivityUser] = pydantic_v1.Field() + user: typing.Optional[ActivityUser] = pydantic.Field() """ The user that performed the action. """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's activity was created. """ - activity_type: typing.Optional[ActivityActivityType] = pydantic_v1.Field() + activity_type: typing.Optional[ActivityActivityType] = pydantic.Field() """ The activity's type. @@ -59,17 +60,17 @@ class Activity(pydantic_v1.BaseModel): - `OTHER` - OTHER """ - subject: typing.Optional[str] = pydantic_v1.Field() + subject: typing.Optional[str] = pydantic.Field() """ The activity's subject. """ - body: typing.Optional[str] = pydantic_v1.Field() + body: typing.Optional[str] = pydantic.Field() """ The activity's body. """ - visibility: typing.Optional[ActivityVisibility] = pydantic_v1.Field() + visibility: typing.Optional[ActivityVisibility] = pydantic.Field() """ The activity's visibility. @@ -78,12 +79,12 @@ class Activity(pydantic_v1.BaseModel): - `PRIVATE` - PRIVATE """ - candidate: typing.Optional[str] = pydantic_v1.Field() + candidate: typing.Optional[str] = pydantic.Field() """ The activity’s candidate. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -91,20 +92,11 @@ class Activity(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/activity_request.py b/src/merge/resources/ats/types/activity_request.py index 9b903919..10d135cd 100644 --- a/src/merge/resources/ats/types/activity_request.py +++ b/src/merge/resources/ats/types/activity_request.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .activity_request_activity_type import ActivityRequestActivityType from .activity_request_user import ActivityRequestUser from .activity_request_visibility import ActivityRequestVisibility -class ActivityRequest(pydantic_v1.BaseModel): +class ActivityRequest(UniversalBaseModel): """ # The Activity Object @@ -23,12 +23,12 @@ class ActivityRequest(pydantic_v1.BaseModel): Fetch from the `LIST Activities` endpoint and filter by `ID` to show all activities. """ - user: typing.Optional[ActivityRequestUser] = pydantic_v1.Field() + user: typing.Optional[ActivityRequestUser] = pydantic.Field() """ The user that performed the action. """ - activity_type: typing.Optional[ActivityRequestActivityType] = pydantic_v1.Field() + activity_type: typing.Optional[ActivityRequestActivityType] = pydantic.Field() """ The activity's type. @@ -37,17 +37,17 @@ class ActivityRequest(pydantic_v1.BaseModel): - `OTHER` - OTHER """ - subject: typing.Optional[str] = pydantic_v1.Field() + subject: typing.Optional[str] = pydantic.Field() """ The activity's subject. """ - body: typing.Optional[str] = pydantic_v1.Field() + body: typing.Optional[str] = pydantic.Field() """ The activity's body. """ - visibility: typing.Optional[ActivityRequestVisibility] = pydantic_v1.Field() + visibility: typing.Optional[ActivityRequestVisibility] = pydantic.Field() """ The activity's visibility. @@ -56,7 +56,7 @@ class ActivityRequest(pydantic_v1.BaseModel): - `PRIVATE` - PRIVATE """ - candidate: typing.Optional[str] = pydantic_v1.Field() + candidate: typing.Optional[str] = pydantic.Field() """ The activity’s candidate. 
""" @@ -64,20 +64,11 @@ class ActivityRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/activity_response.py b/src/merge/resources/ats/types/activity_response.py index 7b71f045..1968fba6 100644 --- a/src/merge/resources/ats/types/activity_response.py +++ b/src/merge/resources/ats/types/activity_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .activity import Activity from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .warning_validation_problem import WarningValidationProblem -class ActivityResponse(pydantic_v1.BaseModel): +class ActivityResponse(UniversalBaseModel): model: Activity warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/advanced_metadata.py b/src/merge/resources/ats/types/advanced_metadata.py index 2aa7d1d6..5b0ec9e2 100644 --- a/src/merge/resources/ats/types/advanced_metadata.py +++ b/src/merge/resources/ats/types/advanced_metadata.py @@ -1,13 +1,13 @@ # This file was 
auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class AdvancedMetadata(pydantic_v1.BaseModel): + +class AdvancedMetadata(UniversalBaseModel): id: str display_name: typing.Optional[str] description: typing.Optional[str] @@ -15,20 +15,11 @@ class AdvancedMetadata(pydantic_v1.BaseModel): is_custom: typing.Optional[bool] field_choices: typing.Optional[typing.List[typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/application.py b/src/merge/resources/ats/types/application.py index 671df8fd..889c3490 100644 --- a/src/merge/resources/ats/types/application.py +++ b/src/merge/resources/ats/types/application.py @@ -5,8 +5,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel, update_forward_refs from .application_credited_to import ApplicationCreditedTo from .application_current_stage import ApplicationCurrentStage from .application_job import ApplicationJob @@ -14,7 +15,7 @@ from .remote_data import RemoteData -class Application(pydantic_v1.BaseModel): +class Application(UniversalBaseModel): """ # The Application Object @@ -28,58 +29,58 @@ class Application(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - candidate: typing.Optional[ApplicationCandidate] = pydantic_v1.Field() + candidate: typing.Optional[ApplicationCandidate] = pydantic.Field() """ The candidate applying. """ - job: typing.Optional[ApplicationJob] = pydantic_v1.Field() + job: typing.Optional[ApplicationJob] = pydantic.Field() """ The job being applied for. 
""" - applied_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + applied_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the application was submitted. """ - rejected_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + rejected_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the application was rejected. """ offers: typing.Optional[typing.List[typing.Optional[ApplicationOffersItem]]] - source: typing.Optional[str] = pydantic_v1.Field() + source: typing.Optional[str] = pydantic.Field() """ The application's source. """ - credited_to: typing.Optional[ApplicationCreditedTo] = pydantic_v1.Field() + credited_to: typing.Optional[ApplicationCreditedTo] = pydantic.Field() """ The user credited for this application. """ - current_stage: typing.Optional[ApplicationCurrentStage] = pydantic_v1.Field() + current_stage: typing.Optional[ApplicationCurrentStage] = pydantic.Field() """ The application's current stage. """ - reject_reason: typing.Optional[ApplicationRejectReason] = pydantic_v1.Field() + reject_reason: typing.Optional[ApplicationRejectReason] = pydantic.Field() """ The application's reason for rejection. """ @@ -88,26 +89,17 @@ class Application(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow from .application_candidate import ApplicationCandidate # noqa: E402 from .application_offers_item import ApplicationOffersItem # noqa: E402 -Application.update_forward_refs() +update_forward_refs(Application) diff --git a/src/merge/resources/ats/types/application_request.py b/src/merge/resources/ats/types/application_request.py index 56e48dca..42770361 100644 --- a/src/merge/resources/ats/types/application_request.py +++ b/src/merge/resources/ats/types/application_request.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .application_request_candidate import ApplicationRequestCandidate from .application_request_credited_to import ApplicationRequestCreditedTo from .application_request_current_stage import ApplicationRequestCurrentStage @@ -13,7 +14,7 @@ from .application_request_reject_reason import ApplicationRequestRejectReason -class ApplicationRequest(pydantic_v1.BaseModel): +class ApplicationRequest(UniversalBaseModel): """ # The Application Object @@ -26,43 +27,43 
@@ class ApplicationRequest(pydantic_v1.BaseModel): Fetch from the `LIST Applications` endpoint and filter by `ID` to show all applications. """ - candidate: typing.Optional[ApplicationRequestCandidate] = pydantic_v1.Field() + candidate: typing.Optional[ApplicationRequestCandidate] = pydantic.Field() """ The candidate applying. """ - job: typing.Optional[ApplicationRequestJob] = pydantic_v1.Field() + job: typing.Optional[ApplicationRequestJob] = pydantic.Field() """ The job being applied for. """ - applied_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + applied_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the application was submitted. """ - rejected_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + rejected_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the application was rejected. """ offers: typing.Optional[typing.List[typing.Optional[ApplicationRequestOffersItem]]] - source: typing.Optional[str] = pydantic_v1.Field() + source: typing.Optional[str] = pydantic.Field() """ The application's source. """ - credited_to: typing.Optional[ApplicationRequestCreditedTo] = pydantic_v1.Field() + credited_to: typing.Optional[ApplicationRequestCreditedTo] = pydantic.Field() """ The user credited for this application. """ - current_stage: typing.Optional[ApplicationRequestCurrentStage] = pydantic_v1.Field() + current_stage: typing.Optional[ApplicationRequestCurrentStage] = pydantic.Field() """ The application's current stage. """ - reject_reason: typing.Optional[ApplicationRequestRejectReason] = pydantic_v1.Field() + reject_reason: typing.Optional[ApplicationRequestRejectReason] = pydantic.Field() """ The application's reason for rejection. """ @@ -71,20 +72,11 @@ class ApplicationRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/application_response.py b/src/merge/resources/ats/types/application_response.py index d4b00062..d44d8351 100644 --- a/src/merge/resources/ats/types/application_response.py +++ b/src/merge/resources/ats/types/application_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .application import Application from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .warning_validation_problem import WarningValidationProblem -class ApplicationResponse(pydantic_v1.BaseModel): +class ApplicationResponse(UniversalBaseModel): model: Application warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/async_passthrough_reciept.py b/src/merge/resources/ats/types/async_passthrough_reciept.py index 2cc33210..f2144443 100644 --- a/src/merge/resources/ats/types/async_passthrough_reciept.py +++ b/src/merge/resources/ats/types/async_passthrough_reciept.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class AsyncPassthroughReciept(pydantic_v1.BaseModel): - async_passthrough_receipt_id: str - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class AsyncPassthroughReciept(UniversalBaseModel): + async_passthrough_receipt_id: str - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/attachment.py b/src/merge/resources/ats/types/attachment.py index 5d773ba6..4f9bcd5c 100644 --- a/src/merge/resources/ats/types/attachment.py +++ b/src/merge/resources/ats/types/attachment.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .attachment_attachment_type import AttachmentAttachmentType from .remote_data import RemoteData -class Attachment(pydantic_v1.BaseModel): +class Attachment(UniversalBaseModel): """ # The Attachment Object @@ -23,37 +24,37 @@ class Attachment(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - file_name: typing.Optional[str] = pydantic_v1.Field() + file_name: typing.Optional[str] = pydantic.Field() """ The attachment's name. """ - file_url: typing.Optional[str] = pydantic_v1.Field() + file_url: typing.Optional[str] = pydantic.Field() """ The attachment's url. """ - candidate: typing.Optional[str] = pydantic_v1.Field() + candidate: typing.Optional[str] = pydantic.Field() """ """ - attachment_type: typing.Optional[AttachmentAttachmentType] = pydantic_v1.Field() + attachment_type: typing.Optional[AttachmentAttachmentType] = pydantic.Field() """ The attachment's type. 
@@ -67,20 +68,11 @@ class Attachment(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/attachment_request.py b/src/merge/resources/ats/types/attachment_request.py index 80d09024..0cb2176c 100644 --- a/src/merge/resources/ats/types/attachment_request.py +++ b/src/merge/resources/ats/types/attachment_request.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .attachment_request_attachment_type import AttachmentRequestAttachmentType -class AttachmentRequest(pydantic_v1.BaseModel): +class AttachmentRequest(UniversalBaseModel): """ # The Attachment Object @@ -21,22 +21,22 @@ class AttachmentRequest(pydantic_v1.BaseModel): Fetch from the `LIST Attachments` endpoint and view attachments accessible by a company. """ - file_name: typing.Optional[str] = pydantic_v1.Field() + file_name: typing.Optional[str] = pydantic.Field() """ The attachment's name. """ - file_url: typing.Optional[str] = pydantic_v1.Field() + file_url: typing.Optional[str] = pydantic.Field() """ The attachment's url. """ - candidate: typing.Optional[str] = pydantic_v1.Field() + candidate: typing.Optional[str] = pydantic.Field() """ """ - attachment_type: typing.Optional[AttachmentRequestAttachmentType] = pydantic_v1.Field() + attachment_type: typing.Optional[AttachmentRequestAttachmentType] = pydantic.Field() """ The attachment's type. 
@@ -49,20 +49,11 @@ class AttachmentRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/attachment_response.py b/src/merge/resources/ats/types/attachment_response.py index ba831e07..c54e5ed7 100644 --- a/src/merge/resources/ats/types/attachment_response.py +++ b/src/merge/resources/ats/types/attachment_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .attachment import Attachment from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .warning_validation_problem import WarningValidationProblem -class AttachmentResponse(pydantic_v1.BaseModel): +class AttachmentResponse(UniversalBaseModel): model: Attachment warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/audit_log_event.py b/src/merge/resources/ats/types/audit_log_event.py index 43be24a5..c0b4e43a 100644 --- a/src/merge/resources/ats/types/audit_log_event.py +++ b/src/merge/resources/ats/types/audit_log_event.py @@ -3,25 +3,26 @@ import datetime as 
dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .audit_log_event_event_type import AuditLogEventEventType from .audit_log_event_role import AuditLogEventRole -class AuditLogEvent(pydantic_v1.BaseModel): +class AuditLogEvent(UniversalBaseModel): id: typing.Optional[str] - user_name: typing.Optional[str] = pydantic_v1.Field() + user_name: typing.Optional[str] = pydantic.Field() """ The User's full name at the time of this Event occurring. """ - user_email: typing.Optional[str] = pydantic_v1.Field() + user_email: typing.Optional[str] = pydantic.Field() """ The User's email at the time of this Event occurring. """ - role: AuditLogEventRole = pydantic_v1.Field() + role: AuditLogEventRole = pydantic.Field() """ Designates the role of the user (or SYSTEM/API if action not taken by a user) at the time of this Event occurring. @@ -34,7 +35,7 @@ class AuditLogEvent(pydantic_v1.BaseModel): """ ip_address: str - event_type: AuditLogEventEventType = pydantic_v1.Field() + event_type: AuditLogEventEventType = pydantic.Field() """ Designates the type of event that occurred. @@ -80,20 +81,11 @@ class AuditLogEvent(pydantic_v1.BaseModel): event_description: str created_at: typing.Optional[dt.datetime] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/available_actions.py b/src/merge/resources/ats/types/available_actions.py index bbd94581..1f1d424c 100644 --- a/src/merge/resources/ats/types/available_actions.py +++ b/src/merge/resources/ats/types/available_actions.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_integration import AccountIntegration from .model_operation import ModelOperation -class AvailableActions(pydantic_v1.BaseModel): +class AvailableActions(UniversalBaseModel): """ # The AvailableActions Object @@ -26,20 +26,11 @@ class AvailableActions(pydantic_v1.BaseModel): passthrough_available: bool available_model_operations: typing.Optional[typing.List[ModelOperation]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/candidate.py b/src/merge/resources/ats/types/candidate.py index 26e44fc5..e8146a0f 100644 --- a/src/merge/resources/ats/types/candidate.py +++ b/src/merge/resources/ats/types/candidate.py @@ -5,8 +5,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel, update_forward_refs from .candidate_attachments_item import CandidateAttachmentsItem from .email_address import EmailAddress from .phone_number import PhoneNumber @@ -14,7 +15,7 @@ from .url import Url -class Candidate(pydantic_v1.BaseModel): +class Candidate(UniversalBaseModel): """ # The Candidate Object @@ -28,67 +29,67 @@ class Candidate(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - first_name: typing.Optional[str] = pydantic_v1.Field() + first_name: typing.Optional[str] = pydantic.Field() """ The candidate's first name. """ - last_name: typing.Optional[str] = pydantic_v1.Field() + last_name: typing.Optional[str] = pydantic.Field() """ The candidate's last name. """ - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The candidate's current company. 
""" - title: typing.Optional[str] = pydantic_v1.Field() + title: typing.Optional[str] = pydantic.Field() """ The candidate's current title. """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's candidate was created. """ - remote_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_updated_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's candidate was updated. """ - last_interaction_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + last_interaction_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the most recent interaction with the candidate occurred. """ - is_private: typing.Optional[bool] = pydantic_v1.Field() + is_private: typing.Optional[bool] = pydantic.Field() """ Whether or not the candidate is private. """ - can_email: typing.Optional[bool] = pydantic_v1.Field() + can_email: typing.Optional[bool] = pydantic.Field() """ Whether or not the candidate can be emailed. """ - locations: typing.Optional[typing.List[typing.Optional[str]]] = pydantic_v1.Field() + locations: typing.Optional[typing.List[typing.Optional[str]]] = pydantic.Field() """ The candidate's locations. """ @@ -96,17 +97,17 @@ class Candidate(pydantic_v1.BaseModel): phone_numbers: typing.Optional[typing.List[PhoneNumber]] email_addresses: typing.Optional[typing.List[EmailAddress]] urls: typing.Optional[typing.List[Url]] - tags: typing.Optional[typing.List[typing.Optional[str]]] = pydantic_v1.Field() + tags: typing.Optional[typing.List[typing.Optional[str]]] = pydantic.Field() """ Array of `Tag` names as strings. """ - applications: typing.Optional[typing.List[typing.Optional[CandidateApplicationsItem]]] = pydantic_v1.Field() + applications: typing.Optional[typing.List[typing.Optional[CandidateApplicationsItem]]] = pydantic.Field() """ Array of `Application` object IDs. """ - attachments: typing.Optional[typing.List[typing.Optional[CandidateAttachmentsItem]]] = pydantic_v1.Field() + attachments: typing.Optional[typing.List[typing.Optional[CandidateAttachmentsItem]]] = pydantic.Field() """ Array of `Attachment` object IDs. 
""" @@ -115,25 +116,16 @@ class Candidate(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow from .candidate_applications_item import CandidateApplicationsItem # noqa: E402 -Candidate.update_forward_refs() +update_forward_refs(Candidate) diff --git a/src/merge/resources/ats/types/candidate_request.py b/src/merge/resources/ats/types/candidate_request.py index d80ba1a5..9dbc8e68 100644 --- a/src/merge/resources/ats/types/candidate_request.py +++ b/src/merge/resources/ats/types/candidate_request.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .candidate_request_applications_item import CandidateRequestApplicationsItem from .candidate_request_attachments_item import CandidateRequestAttachmentsItem from .email_address_request import EmailAddressRequest @@ -12,7 +13,7 @@ from .url_request import UrlRequest -class CandidateRequest(pydantic_v1.BaseModel): +class CandidateRequest(UniversalBaseModel): """ # The Candidate Object @@ -25,42 +26,42 @@ class CandidateRequest(pydantic_v1.BaseModel): Fetch from the `LIST Candidates` endpoint and filter by `ID` to show all candidates. """ - first_name: typing.Optional[str] = pydantic_v1.Field() + first_name: typing.Optional[str] = pydantic.Field() """ The candidate's first name. """ - last_name: typing.Optional[str] = pydantic_v1.Field() + last_name: typing.Optional[str] = pydantic.Field() """ The candidate's last name. """ - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The candidate's current company. """ - title: typing.Optional[str] = pydantic_v1.Field() + title: typing.Optional[str] = pydantic.Field() """ The candidate's current title. """ - last_interaction_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + last_interaction_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the most recent interaction with the candidate occurred. """ - is_private: typing.Optional[bool] = pydantic_v1.Field() + is_private: typing.Optional[bool] = pydantic.Field() """ Whether or not the candidate is private. """ - can_email: typing.Optional[bool] = pydantic_v1.Field() + can_email: typing.Optional[bool] = pydantic.Field() """ Whether or not the candidate can be emailed. 
""" - locations: typing.Optional[typing.List[typing.Optional[str]]] = pydantic_v1.Field() + locations: typing.Optional[typing.List[typing.Optional[str]]] = pydantic.Field() """ The candidate's locations. """ @@ -68,17 +69,17 @@ class CandidateRequest(pydantic_v1.BaseModel): phone_numbers: typing.Optional[typing.List[PhoneNumberRequest]] email_addresses: typing.Optional[typing.List[EmailAddressRequest]] urls: typing.Optional[typing.List[UrlRequest]] - tags: typing.Optional[typing.List[typing.Optional[str]]] = pydantic_v1.Field() + tags: typing.Optional[typing.List[typing.Optional[str]]] = pydantic.Field() """ Array of `Tag` names as strings. """ - applications: typing.Optional[typing.List[typing.Optional[CandidateRequestApplicationsItem]]] = pydantic_v1.Field() + applications: typing.Optional[typing.List[typing.Optional[CandidateRequestApplicationsItem]]] = pydantic.Field() """ Array of `Application` object IDs. """ - attachments: typing.Optional[typing.List[typing.Optional[CandidateRequestAttachmentsItem]]] = pydantic_v1.Field() + attachments: typing.Optional[typing.List[typing.Optional[CandidateRequestAttachmentsItem]]] = pydantic.Field() """ Array of `Attachment` object IDs. """ @@ -87,20 +88,11 @@ class CandidateRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/candidate_response.py b/src/merge/resources/ats/types/candidate_response.py index af94e809..8053d441 100644 --- a/src/merge/resources/ats/types/candidate_response.py +++ b/src/merge/resources/ats/types/candidate_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .candidate import Candidate from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .warning_validation_problem import WarningValidationProblem -class CandidateResponse(pydantic_v1.BaseModel): +class CandidateResponse(UniversalBaseModel): model: Candidate warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/common_model_scope_api.py b/src/merge/resources/ats/types/common_model_scope_api.py index d6fdec56..093001ba 100644 --- a/src/merge/resources/ats/types/common_model_scope_api.py +++ b/src/merge/resources/ats/types/common_model_scope_api.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .individual_common_model_scope_deserializer import IndividualCommonModelScopeDeserializer -class CommonModelScopeApi(pydantic_v1.BaseModel): - common_models: typing.List[IndividualCommonModelScopeDeserializer] = pydantic_v1.Field() +class CommonModelScopeApi(UniversalBaseModel): + common_models: typing.List[IndividualCommonModelScopeDeserializer] = pydantic.Field() """ The common models you want to update the scopes for """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/common_model_scopes_body_request.py b/src/merge/resources/ats/types/common_model_scopes_body_request.py index e956cbdc..9098f031 100644 --- a/src/merge/resources/ats/types/common_model_scopes_body_request.py +++ b/src/merge/resources/ats/types/common_model_scopes_body_request.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .enabled_actions_enum import EnabledActionsEnum -class CommonModelScopesBodyRequest(pydantic_v1.BaseModel): +class CommonModelScopesBodyRequest(UniversalBaseModel): model_id: str enabled_actions: typing.List[EnabledActionsEnum] disabled_fields: typing.List[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/data_passthrough_request.py b/src/merge/resources/ats/types/data_passthrough_request.py index fdf9b18c..3f9ee090 100644 --- a/src/merge/resources/ats/types/data_passthrough_request.py +++ b/src/merge/resources/ats/types/data_passthrough_request.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .method_enum import MethodEnum from .multipart_form_field_request import MultipartFormFieldRequest from .request_format_enum import RequestFormatEnum -class DataPassthroughRequest(pydantic_v1.BaseModel): +class DataPassthroughRequest(UniversalBaseModel): """ # The DataPassthrough Object @@ -24,51 +24,42 @@ class DataPassthroughRequest(pydantic_v1.BaseModel): """ method: MethodEnum - path: str = pydantic_v1.Field() + path: str = pydantic.Field() """ The path of the request in the third party's platform. """ - base_url_override: typing.Optional[str] = pydantic_v1.Field() + base_url_override: typing.Optional[str] = pydantic.Field() """ An optional override of the third party's base url for the request. """ - data: typing.Optional[str] = pydantic_v1.Field() + data: typing.Optional[str] = pydantic.Field() """ The data with the request. You must include a `request_format` parameter matching the data's format """ - multipart_form_data: typing.Optional[typing.List[MultipartFormFieldRequest]] = pydantic_v1.Field() + multipart_form_data: typing.Optional[typing.List[MultipartFormFieldRequest]] = pydantic.Field() """ Pass an array of `MultipartFormField` objects in here instead of using the `data` param if `request_format` is set to `MULTIPART`. 
""" - headers: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + headers: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ The headers to use for the request (Merge will handle the account's authorization headers). `Content-Type` header is required for passthrough. Choose content type corresponding to expected format of receiving server. """ request_format: typing.Optional[RequestFormatEnum] - normalize_response: typing.Optional[bool] = pydantic_v1.Field() + normalize_response: typing.Optional[bool] = pydantic.Field() """ Optional. If true, the response will always be an object of the form `{"type": T, "value": ...}` where `T` will be one of `string, boolean, number, null, array, object`. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/debug_mode_log.py b/src/merge/resources/ats/types/debug_mode_log.py index 321c9090..8edea2ae 100644 --- a/src/merge/resources/ats/types/debug_mode_log.py +++ b/src/merge/resources/ats/types/debug_mode_log.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_model_log_summary import DebugModelLogSummary -class DebugModeLog(pydantic_v1.BaseModel): +class DebugModeLog(UniversalBaseModel): log_id: str dashboard_view: str log_summary: DebugModelLogSummary - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/debug_model_log_summary.py b/src/merge/resources/ats/types/debug_model_log_summary.py index 06bb154b..4f5b07db 100644 --- a/src/merge/resources/ats/types/debug_model_log_summary.py +++ b/src/merge/resources/ats/types/debug_model_log_summary.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class DebugModelLogSummary(pydantic_v1.BaseModel): + +class DebugModelLogSummary(UniversalBaseModel): url: str method: str status_code: int - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/department.py b/src/merge/resources/ats/types/department.py index 9d59ebeb..1a6db808 100644 --- a/src/merge/resources/ats/types/department.py +++ b/src/merge/resources/ats/types/department.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData -class Department(pydantic_v1.BaseModel): +class Department(UniversalBaseModel): """ # The Department Object @@ -22,27 +23,27 @@ class Department(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The department's name. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -50,20 +51,11 @@ class Department(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/eeoc.py b/src/merge/resources/ats/types/eeoc.py index 53253cbd..e060191a 100644 --- a/src/merge/resources/ats/types/eeoc.py +++ b/src/merge/resources/ats/types/eeoc.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .eeoc_candidate import EeocCandidate from .eeoc_disability_status import EeocDisabilityStatus from .eeoc_gender import EeocGender @@ -13,7 +14,7 @@ from .remote_data import RemoteData -class Eeoc(pydantic_v1.BaseModel): +class Eeoc(UniversalBaseModel): """ # The EEOC Object @@ -27,32 +28,32 @@ class Eeoc(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - candidate: typing.Optional[EeocCandidate] = pydantic_v1.Field() + candidate: typing.Optional[EeocCandidate] = pydantic.Field() """ The candidate being represented. """ - submitted_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + submitted_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the information was submitted. """ - race: typing.Optional[EeocRace] = pydantic_v1.Field() + race: typing.Optional[EeocRace] = pydantic.Field() """ The candidate's race. @@ -66,7 +67,7 @@ class Eeoc(pydantic_v1.BaseModel): - `DECLINE_TO_SELF_IDENTIFY` - DECLINE_TO_SELF_IDENTIFY """ - gender: typing.Optional[EeocGender] = pydantic_v1.Field() + gender: typing.Optional[EeocGender] = pydantic.Field() """ The candidate's gender. 
@@ -77,7 +78,7 @@ class Eeoc(pydantic_v1.BaseModel): - `DECLINE_TO_SELF_IDENTIFY` - DECLINE_TO_SELF_IDENTIFY """ - veteran_status: typing.Optional[EeocVeteranStatus] = pydantic_v1.Field() + veteran_status: typing.Optional[EeocVeteranStatus] = pydantic.Field() """ The candidate's veteran status. @@ -86,7 +87,7 @@ class Eeoc(pydantic_v1.BaseModel): - `I_DONT_WISH_TO_ANSWER` - I_DONT_WISH_TO_ANSWER """ - disability_status: typing.Optional[EeocDisabilityStatus] = pydantic_v1.Field() + disability_status: typing.Optional[EeocDisabilityStatus] = pydantic.Field() """ The candidate's disability status. @@ -95,7 +96,7 @@ class Eeoc(pydantic_v1.BaseModel): - `I_DONT_WISH_TO_ANSWER` - I_DONT_WISH_TO_ANSWER """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -103,20 +104,11 @@ class Eeoc(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/email_address.py b/src/merge/resources/ats/types/email_address.py index 6e9e3536..e00b3735 100644 --- a/src/merge/resources/ats/types/email_address.py +++ b/src/merge/resources/ats/types/email_address.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .email_address_email_address_type import EmailAddressEmailAddressType -class EmailAddress(pydantic_v1.BaseModel): +class EmailAddress(UniversalBaseModel): """ # The EmailAddress Object @@ -21,22 +22,22 @@ class EmailAddress(pydantic_v1.BaseModel): Fetch from the `GET Candidate` endpoint and view their email addresses. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - value: typing.Optional[str] = pydantic_v1.Field() + value: typing.Optional[str] = pydantic.Field() """ The email address. 
""" - email_address_type: typing.Optional[EmailAddressEmailAddressType] = pydantic_v1.Field() + email_address_type: typing.Optional[EmailAddressEmailAddressType] = pydantic.Field() """ The type of email address. @@ -45,20 +46,11 @@ class EmailAddress(pydantic_v1.BaseModel): - `OTHER` - OTHER """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/email_address_request.py b/src/merge/resources/ats/types/email_address_request.py index 728ae9c9..d27723f2 100644 --- a/src/merge/resources/ats/types/email_address_request.py +++ b/src/merge/resources/ats/types/email_address_request.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .email_address_request_email_address_type import EmailAddressRequestEmailAddressType -class EmailAddressRequest(pydantic_v1.BaseModel): +class EmailAddressRequest(UniversalBaseModel): """ # The EmailAddress Object @@ -21,12 +21,12 @@ class EmailAddressRequest(pydantic_v1.BaseModel): Fetch from the `GET Candidate` endpoint and view their email addresses. """ - value: typing.Optional[str] = pydantic_v1.Field() + value: typing.Optional[str] = pydantic.Field() """ The email address. """ - email_address_type: typing.Optional[EmailAddressRequestEmailAddressType] = pydantic_v1.Field() + email_address_type: typing.Optional[EmailAddressRequestEmailAddressType] = pydantic.Field() """ The type of email address. 
@@ -38,20 +38,11 @@ class EmailAddressRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/error_validation_problem.py b/src/merge/resources/ats/types/error_validation_problem.py index 425af45c..3838491d 100644 --- a/src/merge/resources/ats/types/error_validation_problem.py +++ b/src/merge/resources/ats/types/error_validation_problem.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .validation_problem_source import ValidationProblemSource -class ErrorValidationProblem(pydantic_v1.BaseModel): +class ErrorValidationProblem(UniversalBaseModel): source: typing.Optional[ValidationProblemSource] title: str detail: str problem_type: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/external_target_field_api.py b/src/merge/resources/ats/types/external_target_field_api.py index a97d536a..8a971c64 100644 --- a/src/merge/resources/ats/types/external_target_field_api.py +++ b/src/merge/resources/ats/types/external_target_field_api.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ExternalTargetFieldApi(pydantic_v1.BaseModel): + +class ExternalTargetFieldApi(UniversalBaseModel): name: typing.Optional[str] description: typing.Optional[str] is_mapped: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/external_target_field_api_response.py b/src/merge/resources/ats/types/external_target_field_api_response.py index c7853353..dad6b342 100644 --- a/src/merge/resources/ats/types/external_target_field_api_response.py +++ b/src/merge/resources/ats/types/external_target_field_api_response.py @@ -1,51 +1,40 @@ # This file was auto-generated by Fern from our API Definition. 
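# A minimal sketch of reading the alias-named fields on ExternalTargetFieldApiResponse, whose
# regenerated definition follows. The JSON keys remain the capitalized aliases ("Candidate",
# "EEOC", "RemoteUser", ...) via pydantic.Field(alias=...), while the Python attributes are
# snake_case. Note that the v1 Config emitted by this patch no longer sets
# allow_population_by_field_name/populate_by_name; whether construction by field name still
# works depends on UniversalBaseModel, which is outside this hunk. The import path below is
# assumed from the file layout, and the helper is illustrative only.
import typing

from merge.resources.ats.types.external_target_field_api_response import (
    ExternalTargetFieldApiResponse,
)


def candidate_target_field_names(resp: ExternalTargetFieldApiResponse) -> typing.List[str]:
    # resp.candidate is parsed from the "Candidate" key and holds ExternalTargetFieldApi
    # entries (name/description/is_mapped), any of which may be None.
    return [field.name for field in (resp.candidate or []) if field.name is not None]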
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .external_target_field_api import ExternalTargetFieldApi -class ExternalTargetFieldApiResponse(pydantic_v1.BaseModel): - activity: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Activity") - application: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Application") - attachment: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Attachment") - candidate: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Candidate") - department: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Department") - eeoc: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="EEOC") - scheduled_interview: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field( +class ExternalTargetFieldApiResponse(UniversalBaseModel): + activity: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Activity") + application: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Application") + attachment: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Attachment") + candidate: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Candidate") + department: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Department") + eeoc: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="EEOC") + scheduled_interview: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field( alias="ScheduledInterview" ) - job: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Job") - job_posting: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="JobPosting") - job_interview_stage: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field( + job: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Job") + job_posting: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="JobPosting") + job_interview_stage: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field( alias="JobInterviewStage" ) - offer: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Offer") - office: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Office") - reject_reason: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="RejectReason") - scorecard: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Scorecard") - tag: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Tag") - remote_user: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="RemoteUser") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = 
{"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + offer: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Offer") + office: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Office") + reject_reason: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="RejectReason") + scorecard: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Scorecard") + tag: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Tag") + remote_user: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="RemoteUser") + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/field_mapping_api_instance.py b/src/merge/resources/ats/types/field_mapping_api_instance.py index d9d7670d..8af85a52 100644 --- a/src/merge/resources/ats/types/field_mapping_api_instance.py +++ b/src/merge/resources/ats/types/field_mapping_api_instance.py @@ -1,34 +1,25 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_mapping_api_instance_remote_field import FieldMappingApiInstanceRemoteField from .field_mapping_api_instance_target_field import FieldMappingApiInstanceTargetField -class FieldMappingApiInstance(pydantic_v1.BaseModel): +class FieldMappingApiInstance(UniversalBaseModel): id: typing.Optional[str] is_integration_wide: typing.Optional[bool] target_field: typing.Optional[FieldMappingApiInstanceTargetField] remote_field: typing.Optional[FieldMappingApiInstanceRemoteField] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/field_mapping_api_instance_remote_field.py b/src/merge/resources/ats/types/field_mapping_api_instance_remote_field.py index 
9539d3dc..0635b76f 100644 --- a/src/merge/resources/ats/types/field_mapping_api_instance_remote_field.py +++ b/src/merge/resources/ats/types/field_mapping_api_instance_remote_field.py @@ -1,36 +1,25 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_mapping_api_instance_remote_field_remote_endpoint_info import ( FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo, ) -class FieldMappingApiInstanceRemoteField(pydantic_v1.BaseModel): +class FieldMappingApiInstanceRemoteField(UniversalBaseModel): remote_key_name: str - schema_: typing.Dict[str, typing.Any] = pydantic_v1.Field(alias="schema") + schema_: typing.Dict[str, typing.Any] = pydantic.Field(alias="schema") remote_endpoint_info: FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py b/src/merge/resources/ats/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py index d9fcc276..e34eb6e4 100644 --- a/src/merge/resources/ats/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py +++ b/src/merge/resources/ats/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo(pydantic_v1.BaseModel): + +class FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo(UniversalBaseModel): method: typing.Optional[str] url_path: typing.Optional[str] field_traversal_path: typing.Optional[typing.List[str]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/field_mapping_api_instance_response.py b/src/merge/resources/ats/types/field_mapping_api_instance_response.py index 18af1188..d4a5c6d9 100644 --- a/src/merge/resources/ats/types/field_mapping_api_instance_response.py +++ b/src/merge/resources/ats/types/field_mapping_api_instance_response.py @@ -1,51 +1,40 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_mapping_api_instance import FieldMappingApiInstance -class FieldMappingApiInstanceResponse(pydantic_v1.BaseModel): - activity: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Activity") - application: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Application") - attachment: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Attachment") - candidate: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Candidate") - department: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Department") - eeoc: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="EEOC") - scheduled_interview: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field( +class FieldMappingApiInstanceResponse(UniversalBaseModel): + activity: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Activity") + application: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Application") + attachment: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Attachment") + candidate: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Candidate") + department: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Department") + eeoc: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="EEOC") + scheduled_interview: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field( alias="ScheduledInterview" ) - job: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Job") - job_posting: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="JobPosting") - job_interview_stage: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field( + job: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Job") + job_posting: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="JobPosting") + job_interview_stage: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field( alias="JobInterviewStage" ) - offer: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Offer") - office: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Office") - reject_reason: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="RejectReason") - scorecard: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Scorecard") - tag: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Tag") - remote_user: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="RemoteUser") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - 
kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + offer: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Offer") + office: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Office") + reject_reason: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="RejectReason") + scorecard: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Scorecard") + tag: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Tag") + remote_user: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="RemoteUser") + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/field_mapping_api_instance_target_field.py b/src/merge/resources/ats/types/field_mapping_api_instance_target_field.py index 25a8dcff..c590d4ce 100644 --- a/src/merge/resources/ats/types/field_mapping_api_instance_target_field.py +++ b/src/merge/resources/ats/types/field_mapping_api_instance_target_field.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldMappingApiInstanceTargetField(pydantic_v1.BaseModel): + +class FieldMappingApiInstanceTargetField(UniversalBaseModel): name: str description: str is_organization_wide: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/field_mapping_instance_response.py b/src/merge/resources/ats/types/field_mapping_instance_response.py index b55d2c40..aaf06f0e 100644 --- a/src/merge/resources/ats/types/field_mapping_instance_response.py +++ b/src/merge/resources/ats/types/field_mapping_instance_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern 
from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .field_mapping_api_instance import FieldMappingApiInstance from .warning_validation_problem import WarningValidationProblem -class FieldMappingInstanceResponse(pydantic_v1.BaseModel): +class FieldMappingInstanceResponse(UniversalBaseModel): model: FieldMappingApiInstance warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/field_permission_deserializer.py b/src/merge/resources/ats/types/field_permission_deserializer.py index 124f3deb..ed80b9d6 100644 --- a/src/merge/resources/ats/types/field_permission_deserializer.py +++ b/src/merge/resources/ats/types/field_permission_deserializer.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldPermissionDeserializer(pydantic_v1.BaseModel): + +class FieldPermissionDeserializer(UniversalBaseModel): enabled: typing.Optional[typing.List[typing.Any]] disabled: typing.Optional[typing.List[typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/field_permission_deserializer_request.py b/src/merge/resources/ats/types/field_permission_deserializer_request.py index 65e80e75..e937e743 100644 --- a/src/merge/resources/ats/types/field_permission_deserializer_request.py +++ b/src/merge/resources/ats/types/field_permission_deserializer_request.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldPermissionDeserializerRequest(pydantic_v1.BaseModel): + +class FieldPermissionDeserializerRequest(UniversalBaseModel): enabled: typing.Optional[typing.List[typing.Any]] disabled: typing.Optional[typing.List[typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/individual_common_model_scope_deserializer.py b/src/merge/resources/ats/types/individual_common_model_scope_deserializer.py index d80ca06e..ffa55055 100644 --- a/src/merge/resources/ats/types/individual_common_model_scope_deserializer.py +++ b/src/merge/resources/ats/types/individual_common_model_scope_deserializer.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_permission_deserializer import FieldPermissionDeserializer from .model_permission_deserializer import ModelPermissionDeserializer -class IndividualCommonModelScopeDeserializer(pydantic_v1.BaseModel): +class IndividualCommonModelScopeDeserializer(UniversalBaseModel): model_name: str model_permissions: typing.Optional[typing.Dict[str, ModelPermissionDeserializer]] field_permissions: typing.Optional[FieldPermissionDeserializer] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/individual_common_model_scope_deserializer_request.py b/src/merge/resources/ats/types/individual_common_model_scope_deserializer_request.py index 8f2e7de5..d0e68f6d 100644 --- a/src/merge/resources/ats/types/individual_common_model_scope_deserializer_request.py +++ b/src/merge/resources/ats/types/individual_common_model_scope_deserializer_request.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
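# A hedged sketch of constructing the request-side scope models regenerated in this patch,
# using only the fields shown here (model_name, and field_permissions with enabled/disabled
# lists). ModelPermissionDeserializerRequest's shape is not visible, so model_permissions is
# passed as None; optional fields are supplied explicitly rather than relying on implicit
# defaults. Import paths are assumed from the file layout; the values are examples only.
from merge.resources.ats.types.field_permission_deserializer_request import (
    FieldPermissionDeserializerRequest,
)
from merge.resources.ats.types.individual_common_model_scope_deserializer_request import (
    IndividualCommonModelScopeDeserializerRequest,
)

candidate_scope = IndividualCommonModelScopeDeserializerRequest(
    model_name="Candidate",
    model_permissions=None,
    field_permissions=FieldPermissionDeserializerRequest(
        enabled=["first_name", "last_name"],
        disabled=[],
    ),
)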
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_permission_deserializer_request import FieldPermissionDeserializerRequest from .model_permission_deserializer_request import ModelPermissionDeserializerRequest -class IndividualCommonModelScopeDeserializerRequest(pydantic_v1.BaseModel): +class IndividualCommonModelScopeDeserializerRequest(UniversalBaseModel): model_name: str model_permissions: typing.Optional[typing.Dict[str, ModelPermissionDeserializerRequest]] field_permissions: typing.Optional[FieldPermissionDeserializerRequest] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/issue.py b/src/merge/resources/ats/types/issue.py index 086a0db2..28366731 100644 --- a/src/merge/resources/ats/types/issue.py +++ b/src/merge/resources/ats/types/issue.py @@ -3,14 +3,15 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .issue_status import IssueStatus -class Issue(pydantic_v1.BaseModel): +class Issue(UniversalBaseModel): id: typing.Optional[str] - status: typing.Optional[IssueStatus] = pydantic_v1.Field() + status: typing.Optional[IssueStatus] = pydantic.Field() """ Status of the issue. 
Options: ('ONGOING', 'RESOLVED') @@ -25,20 +26,11 @@ class Issue(pydantic_v1.BaseModel): is_muted: typing.Optional[bool] error_details: typing.Optional[typing.List[str]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/job.py b/src/merge/resources/ats/types/job.py index 041ebbf6..39c9446d 100644 --- a/src/merge/resources/ats/types/job.py +++ b/src/merge/resources/ats/types/job.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .job_departments_item import JobDepartmentsItem from .job_hiring_managers_item import JobHiringManagersItem from .job_offices_item import JobOfficesItem @@ -15,7 +16,7 @@ from .url import Url -class Job(pydantic_v1.BaseModel): +class Job(UniversalBaseModel): """ # The Job Object @@ -29,37 +30,37 @@ class Job(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The job's name. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The job's description. """ - code: typing.Optional[str] = pydantic_v1.Field() + code: typing.Optional[str] = pydantic.Field() """ The job's code. Typically an additional identifier used to reference the particular job that is displayed on the ATS. """ - status: typing.Optional[JobStatus] = pydantic_v1.Field() + status: typing.Optional[JobStatus] = pydantic.Field() """ The job's status. @@ -70,7 +71,7 @@ class Job(pydantic_v1.BaseModel): - `PENDING` - PENDING """ - type: typing.Optional[JobTypeEnum] = pydantic_v1.Field() + type: typing.Optional[JobTypeEnum] = pydantic.Field() """ The job's type. 
@@ -79,48 +80,48 @@ class Job(pydantic_v1.BaseModel): - `PROFILE` - PROFILE """ - job_postings: typing.Optional[typing.List[typing.Optional[str]]] = pydantic_v1.Field() + job_postings: typing.Optional[typing.List[typing.Optional[str]]] = pydantic.Field() """ IDs of `JobPosting` objects that serve as job postings for this `Job`. """ job_posting_urls: typing.Optional[typing.List[Url]] - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's job was created. """ - remote_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_updated_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's job was updated. """ - confidential: typing.Optional[bool] = pydantic_v1.Field() + confidential: typing.Optional[bool] = pydantic.Field() """ Whether the job is confidential. """ - departments: typing.Optional[typing.List[typing.Optional[JobDepartmentsItem]]] = pydantic_v1.Field() + departments: typing.Optional[typing.List[typing.Optional[JobDepartmentsItem]]] = pydantic.Field() """ IDs of `Department` objects for this `Job`. """ - offices: typing.Optional[typing.List[typing.Optional[JobOfficesItem]]] = pydantic_v1.Field() + offices: typing.Optional[typing.List[typing.Optional[JobOfficesItem]]] = pydantic.Field() """ IDs of `Office` objects for this `Job`. """ - hiring_managers: typing.Optional[typing.List[typing.Optional[JobHiringManagersItem]]] = pydantic_v1.Field() + hiring_managers: typing.Optional[typing.List[typing.Optional[JobHiringManagersItem]]] = pydantic.Field() """ IDs of `RemoteUser` objects that serve as hiring managers for this `Job`. """ - recruiters: typing.Optional[typing.List[typing.Optional[JobRecruitersItem]]] = pydantic_v1.Field() + recruiters: typing.Optional[typing.List[typing.Optional[JobRecruitersItem]]] = pydantic.Field() """ IDs of `RemoteUser` objects that serve as recruiters for this `Job`. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -128,20 +129,11 @@ class Job(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/job_interview_stage.py b/src/merge/resources/ats/types/job_interview_stage.py index 168cd4f2..3335819c 100644 --- a/src/merge/resources/ats/types/job_interview_stage.py +++ b/src/merge/resources/ats/types/job_interview_stage.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .job_interview_stage_job import JobInterviewStageJob from .remote_data import RemoteData -class JobInterviewStage(pydantic_v1.BaseModel): +class JobInterviewStage(UniversalBaseModel): """ # The JobInterviewStage Object @@ -23,37 +24,37 @@ class JobInterviewStage(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ Standard stage names are offered by ATS systems but can be modified by users. """ - job: typing.Optional[JobInterviewStageJob] = pydantic_v1.Field() + job: typing.Optional[JobInterviewStageJob] = pydantic.Field() """ This field is populated only if the stage is specific to a particular job. If the stage is generic, this field will not be populated. """ - stage_order: typing.Optional[int] = pydantic_v1.Field() + stage_order: typing.Optional[int] = pydantic.Field() """ The stage’s order, with the lowest values ordered first. If the third-party does not return details on the order of stages, this field will not be populated. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -61,20 +62,11 @@ class JobInterviewStage(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/job_posting.py b/src/merge/resources/ats/types/job_posting.py index 05ae4b2a..1d6e7bfc 100644 --- a/src/merge/resources/ats/types/job_posting.py +++ b/src/merge/resources/ats/types/job_posting.py @@ -3,15 +3,16 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .job_posting_job import JobPostingJob from .job_posting_job_posting_urls_item import JobPostingJobPostingUrlsItem from .job_posting_status_enum import JobPostingStatusEnum from .remote_data import RemoteData -class JobPosting(pydantic_v1.BaseModel): +class JobPosting(UniversalBaseModel): """ # The JobPosting Object @@ -25,37 +26,37 @@ class JobPosting(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - title: typing.Optional[str] = pydantic_v1.Field() + title: typing.Optional[str] = pydantic.Field() """ The job posting’s title. """ - job_posting_urls: typing.Optional[typing.List[JobPostingJobPostingUrlsItem]] = pydantic_v1.Field() + job_posting_urls: typing.Optional[typing.List[JobPostingJobPostingUrlsItem]] = pydantic.Field() """ The Url object is used to represent hyperlinks for a candidate to apply to a given job. """ - job: typing.Optional[JobPostingJob] = pydantic_v1.Field() + job: typing.Optional[JobPostingJob] = pydantic.Field() """ ID of `Job` object for this `JobPosting`. """ - status: typing.Optional[JobPostingStatusEnum] = pydantic_v1.Field() + status: typing.Optional[JobPostingStatusEnum] = pydantic.Field() """ The job posting's status. 
@@ -66,27 +67,27 @@ class JobPosting(pydantic_v1.BaseModel): - `PENDING` - PENDING """ - content: typing.Optional[str] = pydantic_v1.Field() + content: typing.Optional[str] = pydantic.Field() """ The job posting’s content. """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's job posting was created. """ - remote_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_updated_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's job posting was updated. """ - is_internal: typing.Optional[bool] = pydantic_v1.Field() + is_internal: typing.Optional[bool] = pydantic.Field() """ Indicates whether the job posting is internal or external. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -94,20 +95,11 @@ class JobPosting(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/link_token.py b/src/merge/resources/ats/types/link_token.py index 1c82d1ac..87c88faf 100644 --- a/src/merge/resources/ats/types/link_token.py +++ b/src/merge/resources/ats/types/link_token.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class LinkToken(pydantic_v1.BaseModel): + +class LinkToken(UniversalBaseModel): link_token: str integration_name: typing.Optional[str] magic_link_url: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/linked_account_status.py b/src/merge/resources/ats/types/linked_account_status.py index 60e21a98..34184012 100644 --- a/src/merge/resources/ats/types/linked_account_status.py +++ b/src/merge/resources/ats/types/linked_account_status.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class LinkedAccountStatus(pydantic_v1.BaseModel): + +class LinkedAccountStatus(UniversalBaseModel): linked_account_status: str can_make_request: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/meta_response.py b/src/merge/resources/ats/types/meta_response.py index debaf4ef..27e02126 100644 --- a/src/merge/resources/ats/types/meta_response.py +++ b/src/merge/resources/ats/types/meta_response.py @@ -1,34 +1,25 @@ # This file was auto-generated by Fern from our API Definition. 
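[editor's note] With the per-model overrides gone, serialization goes through whichever API the installed pydantic major version provides; any by-alias / exclude-unset defaults now presumably live in UniversalBaseModel, which is outside this excerpt. A hedged usage sketch against the LinkToken model defined just above; the import path is taken from the file path in the hunk.

```python
# Usage sketch (not from the patch): serialize a regenerated model on either
# pydantic major version. LinkToken is the model from
# src/merge/resources/ats/types/link_token.py above.
import pydantic
from merge.resources.ats.types.link_token import LinkToken

token = LinkToken(
    link_token="abc123",            # required str
    integration_name="Greenhouse",
    magic_link_url=None,
)

if pydantic.VERSION.startswith("2."):
    payload = token.model_dump(by_alias=True, exclude_unset=True)  # pydantic v2 API
else:
    payload = token.dict(by_alias=True, exclude_unset=True)        # pydantic v1 API

print(payload)
```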
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .linked_account_status import LinkedAccountStatus -class MetaResponse(pydantic_v1.BaseModel): +class MetaResponse(UniversalBaseModel): request_schema: typing.Dict[str, typing.Any] remote_field_classes: typing.Optional[typing.Dict[str, typing.Any]] status: typing.Optional[LinkedAccountStatus] has_conditional_params: bool has_required_linked_account_params: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/model_operation.py b/src/merge/resources/ats/types/model_operation.py index 0f4429ec..efe8355e 100644 --- a/src/merge/resources/ats/types/model_operation.py +++ b/src/merge/resources/ats/types/model_operation.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ModelOperation(pydantic_v1.BaseModel): + +class ModelOperation(UniversalBaseModel): """ # The ModelOperation Object @@ -25,20 +25,11 @@ class ModelOperation(pydantic_v1.BaseModel): required_post_parameters: typing.List[str] supported_fields: typing.List[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/model_permission_deserializer.py b/src/merge/resources/ats/types/model_permission_deserializer.py index 5a6adf20..14bc4f99 100644 --- a/src/merge/resources/ats/types/model_permission_deserializer.py +++ b/src/merge/resources/ats/types/model_permission_deserializer.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ModelPermissionDeserializer(pydantic_v1.BaseModel): - is_enabled: typing.Optional[bool] - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class ModelPermissionDeserializer(UniversalBaseModel): + is_enabled: typing.Optional[bool] - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/model_permission_deserializer_request.py b/src/merge/resources/ats/types/model_permission_deserializer_request.py index 3f72b9ac..cc2e7f77 100644 --- a/src/merge/resources/ats/types/model_permission_deserializer_request.py +++ b/src/merge/resources/ats/types/model_permission_deserializer_request.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
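[editor's note] For readers comparing behaviour before and after the regeneration: the deleted json()/dict() overrides in every hunk serialized the model twice, once with exclude_unset=True and once with exclude_none=True, and deep-merged the two dicts via deep_union_pydantic_dicts. That helper lives in the old core module and is not shown here; the sketch below is a simplified re-creation of the merge step only, assuming the second dict wins on scalar conflicts.

```python
# Simplified re-creation of the merge step the removed dict() overrides relied on.
# Not the SDK's implementation -- for comparison only.
import typing


def deep_union(
    left: typing.Dict[str, typing.Any],
    right: typing.Dict[str, typing.Any],
) -> typing.Dict[str, typing.Any]:
    """Recursively union two dicts: nested dicts are merged, other values from
    `right` overwrite `left` (assumed precedence)."""
    merged = dict(left)
    for key, value in right.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = deep_union(merged[key], value)
        else:
            merged[key] = value
    return merged


# e.g. unioning an exclude_unset pass with an exclude_none pass:
print(deep_union({"a": None, "b": {"x": 1}}, {"b": {"y": 2}, "c": 3}))
# -> {'a': None, 'b': {'x': 1, 'y': 2}, 'c': 3}
```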
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ModelPermissionDeserializerRequest(pydantic_v1.BaseModel): - is_enabled: typing.Optional[bool] - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class ModelPermissionDeserializerRequest(UniversalBaseModel): + is_enabled: typing.Optional[bool] - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/multipart_form_field_request.py b/src/merge/resources/ats/types/multipart_form_field_request.py index 9c8ffb21..b6a6c708 100644 --- a/src/merge/resources/ats/types/multipart_form_field_request.py +++ b/src/merge/resources/ats/types/multipart_form_field_request.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .multipart_form_field_request_encoding import MultipartFormFieldRequestEncoding -class MultipartFormFieldRequest(pydantic_v1.BaseModel): +class MultipartFormFieldRequest(UniversalBaseModel): """ # The MultipartFormField Object @@ -21,17 +21,17 @@ class MultipartFormFieldRequest(pydantic_v1.BaseModel): Create a `MultipartFormField` to define a multipart form entry. """ - name: str = pydantic_v1.Field() + name: str = pydantic.Field() """ The name of the form field """ - data: str = pydantic_v1.Field() + data: str = pydantic.Field() """ The data for the form field. """ - encoding: typing.Optional[MultipartFormFieldRequestEncoding] = pydantic_v1.Field() + encoding: typing.Optional[MultipartFormFieldRequestEncoding] = pydantic.Field() """ The encoding of the value of `data`. Defaults to `RAW` if not defined. @@ -40,30 +40,21 @@ class MultipartFormFieldRequest(pydantic_v1.BaseModel): - `GZIP_BASE64` - GZIP_BASE64 """ - file_name: typing.Optional[str] = pydantic_v1.Field() + file_name: typing.Optional[str] = pydantic.Field() """ The file name of the form field, if the field is for a file. """ - content_type: typing.Optional[str] = pydantic_v1.Field() + content_type: typing.Optional[str] = pydantic.Field() """ The MIME type of the file, if the field is for a file. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/offer.py b/src/merge/resources/ats/types/offer.py index 2754325b..8302b50c 100644 --- a/src/merge/resources/ats/types/offer.py +++ b/src/merge/resources/ats/types/offer.py @@ -5,14 +5,15 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel, update_forward_refs from .offer_creator import OfferCreator from .offer_status import OfferStatus from .remote_data import RemoteData -class Offer(pydantic_v1.BaseModel): +class Offer(UniversalBaseModel): """ # The Offer Object @@ -26,52 +27,52 @@ class Offer(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - application: typing.Optional[OfferApplication] = pydantic_v1.Field() + application: typing.Optional[OfferApplication] = pydantic.Field() """ The application who is receiving the offer. """ - creator: typing.Optional[OfferCreator] = pydantic_v1.Field() + creator: typing.Optional[OfferCreator] = pydantic.Field() """ The user who created the offer. """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's offer was created. """ - closed_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + closed_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the offer was closed. """ - sent_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + sent_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the offer was sent. """ - start_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + start_date: typing.Optional[dt.datetime] = pydantic.Field() """ The employment start date on the offer. """ - status: typing.Optional[OfferStatus] = pydantic_v1.Field() + status: typing.Optional[OfferStatus] = pydantic.Field() """ The offer's status. 
@@ -86,7 +87,7 @@ class Offer(pydantic_v1.BaseModel): - `DEPRECATED` - DEPRECATED """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -94,25 +95,16 @@ class Offer(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow from .offer_application import OfferApplication # noqa: E402 -Offer.update_forward_refs() +update_forward_refs(Offer) diff --git a/src/merge/resources/ats/types/office.py b/src/merge/resources/ats/types/office.py index afd75362..c6a92e5b 100644 --- a/src/merge/resources/ats/types/office.py +++ b/src/merge/resources/ats/types/office.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData -class Office(pydantic_v1.BaseModel): +class Office(UniversalBaseModel): """ # The Office Object @@ -22,32 +23,32 @@ class Office(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The office's name. """ - location: typing.Optional[str] = pydantic_v1.Field() + location: typing.Optional[str] = pydantic.Field() """ The office's location. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -55,20 +56,11 @@ class Office(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_account_details_and_actions_list.py b/src/merge/resources/ats/types/paginated_account_details_and_actions_list.py index 280100c4..07323330 100644 --- a/src/merge/resources/ats/types/paginated_account_details_and_actions_list.py +++ b/src/merge/resources/ats/types/paginated_account_details_and_actions_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_details_and_actions import AccountDetailsAndActions -class PaginatedAccountDetailsAndActionsList(pydantic_v1.BaseModel): +class PaginatedAccountDetailsAndActionsList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[AccountDetailsAndActions]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_activity_list.py b/src/merge/resources/ats/types/paginated_activity_list.py index 3696f98d..e3dc0eed 100644 --- a/src/merge/resources/ats/types/paginated_activity_list.py +++ b/src/merge/resources/ats/types/paginated_activity_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API 
Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .activity import Activity -class PaginatedActivityList(pydantic_v1.BaseModel): +class PaginatedActivityList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Activity]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_application_list.py b/src/merge/resources/ats/types/paginated_application_list.py index ea781037..22fc1732 100644 --- a/src/merge/resources/ats/types/paginated_application_list.py +++ b/src/merge/resources/ats/types/paginated_application_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
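[editor's note] The offer.py hunk above also swaps Offer.update_forward_refs() for an update_forward_refs(Offer) helper imported from core.pydantic_utilities, which resolves the circular OfferApplication reference under either pydantic version. The helper itself is outside this excerpt; a minimal, version-agnostic sketch of what such a function can look like:

```python
# Minimal sketch of a version-agnostic forward-reference rebuild. The SDK's real
# helper in core/pydantic_utilities.py is not shown in this patch and may differ.
import typing

import pydantic


def update_forward_refs(model: typing.Type[pydantic.BaseModel], **localns: typing.Any) -> None:
    if hasattr(model, "model_rebuild"):
        # pydantic v2: re-evaluate outstanding forward references
        model.model_rebuild(force=True)
    else:
        # pydantic v1 fallback
        model.update_forward_refs(**localns)
```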
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .application import Application -class PaginatedApplicationList(pydantic_v1.BaseModel): +class PaginatedApplicationList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Application]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_attachment_list.py b/src/merge/resources/ats/types/paginated_attachment_list.py index 52f28d1b..4e3d196b 100644 --- a/src/merge/resources/ats/types/paginated_attachment_list.py +++ b/src/merge/resources/ats/types/paginated_attachment_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .attachment import Attachment -class PaginatedAttachmentList(pydantic_v1.BaseModel): +class PaginatedAttachmentList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Attachment]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_audit_log_event_list.py b/src/merge/resources/ats/types/paginated_audit_log_event_list.py index 1d4154d2..e5e04fa7 100644 --- a/src/merge/resources/ats/types/paginated_audit_log_event_list.py +++ b/src/merge/resources/ats/types/paginated_audit_log_event_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .audit_log_event import AuditLogEvent -class PaginatedAuditLogEventList(pydantic_v1.BaseModel): +class PaginatedAuditLogEventList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[AuditLogEvent]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_candidate_list.py b/src/merge/resources/ats/types/paginated_candidate_list.py index 0f644c5d..d47f613a 100644 --- a/src/merge/resources/ats/types/paginated_candidate_list.py +++ b/src/merge/resources/ats/types/paginated_candidate_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .candidate import Candidate -class PaginatedCandidateList(pydantic_v1.BaseModel): +class PaginatedCandidateList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Candidate]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_department_list.py b/src/merge/resources/ats/types/paginated_department_list.py index 406818fe..5d54bbce 100644 --- a/src/merge/resources/ats/types/paginated_department_list.py +++ b/src/merge/resources/ats/types/paginated_department_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .department import Department -class PaginatedDepartmentList(pydantic_v1.BaseModel): +class PaginatedDepartmentList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Department]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_eeoc_list.py b/src/merge/resources/ats/types/paginated_eeoc_list.py index 58acad54..a2e7319e 100644 --- a/src/merge/resources/ats/types/paginated_eeoc_list.py +++ b/src/merge/resources/ats/types/paginated_eeoc_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .eeoc import Eeoc -class PaginatedEeocList(pydantic_v1.BaseModel): +class PaginatedEeocList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Eeoc]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_issue_list.py b/src/merge/resources/ats/types/paginated_issue_list.py index 1016e29a..da8437f1 100644 --- a/src/merge/resources/ats/types/paginated_issue_list.py +++ b/src/merge/resources/ats/types/paginated_issue_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .issue import Issue -class PaginatedIssueList(pydantic_v1.BaseModel): +class PaginatedIssueList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Issue]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_job_interview_stage_list.py b/src/merge/resources/ats/types/paginated_job_interview_stage_list.py index d65d4551..807a85fd 100644 --- a/src/merge/resources/ats/types/paginated_job_interview_stage_list.py +++ b/src/merge/resources/ats/types/paginated_job_interview_stage_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .job_interview_stage import JobInterviewStage -class PaginatedJobInterviewStageList(pydantic_v1.BaseModel): +class PaginatedJobInterviewStageList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[JobInterviewStage]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_job_list.py b/src/merge/resources/ats/types/paginated_job_list.py index 031c22d2..159a5316 100644 --- a/src/merge/resources/ats/types/paginated_job_list.py +++ b/src/merge/resources/ats/types/paginated_job_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .job import Job -class PaginatedJobList(pydantic_v1.BaseModel): +class PaginatedJobList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Job]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_job_posting_list.py b/src/merge/resources/ats/types/paginated_job_posting_list.py index f71b7991..69f719b4 100644 --- a/src/merge/resources/ats/types/paginated_job_posting_list.py +++ b/src/merge/resources/ats/types/paginated_job_posting_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .job_posting import JobPosting -class PaginatedJobPostingList(pydantic_v1.BaseModel): +class PaginatedJobPostingList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[JobPosting]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_offer_list.py b/src/merge/resources/ats/types/paginated_offer_list.py index 86355bba..f07f1d5e 100644 --- a/src/merge/resources/ats/types/paginated_offer_list.py +++ b/src/merge/resources/ats/types/paginated_offer_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .offer import Offer -class PaginatedOfferList(pydantic_v1.BaseModel): +class PaginatedOfferList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Offer]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_office_list.py b/src/merge/resources/ats/types/paginated_office_list.py index c435b372..1ffaa392 100644 --- a/src/merge/resources/ats/types/paginated_office_list.py +++ b/src/merge/resources/ats/types/paginated_office_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .office import Office -class PaginatedOfficeList(pydantic_v1.BaseModel): +class PaginatedOfficeList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Office]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_reject_reason_list.py b/src/merge/resources/ats/types/paginated_reject_reason_list.py index 83c3f847..bd952f0a 100644 --- a/src/merge/resources/ats/types/paginated_reject_reason_list.py +++ b/src/merge/resources/ats/types/paginated_reject_reason_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .reject_reason import RejectReason -class PaginatedRejectReasonList(pydantic_v1.BaseModel): +class PaginatedRejectReasonList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[RejectReason]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_remote_user_list.py b/src/merge/resources/ats/types/paginated_remote_user_list.py index 368a576d..abf6396b 100644 --- a/src/merge/resources/ats/types/paginated_remote_user_list.py +++ b/src/merge/resources/ats/types/paginated_remote_user_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_user import RemoteUser -class PaginatedRemoteUserList(pydantic_v1.BaseModel): +class PaginatedRemoteUserList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[RemoteUser]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_scheduled_interview_list.py b/src/merge/resources/ats/types/paginated_scheduled_interview_list.py index 1ecb460e..24b157ad 100644 --- a/src/merge/resources/ats/types/paginated_scheduled_interview_list.py +++ b/src/merge/resources/ats/types/paginated_scheduled_interview_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .scheduled_interview import ScheduledInterview -class PaginatedScheduledInterviewList(pydantic_v1.BaseModel): +class PaginatedScheduledInterviewList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[ScheduledInterview]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_scorecard_list.py b/src/merge/resources/ats/types/paginated_scorecard_list.py index 72895a94..9c941ecd 100644 --- a/src/merge/resources/ats/types/paginated_scorecard_list.py +++ b/src/merge/resources/ats/types/paginated_scorecard_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .scorecard import Scorecard -class PaginatedScorecardList(pydantic_v1.BaseModel): +class PaginatedScorecardList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Scorecard]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_screening_question_list.py b/src/merge/resources/ats/types/paginated_screening_question_list.py index 5b8e77a1..fde77fe9 100644 --- a/src/merge/resources/ats/types/paginated_screening_question_list.py +++ b/src/merge/resources/ats/types/paginated_screening_question_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .screening_question import ScreeningQuestion -class PaginatedScreeningQuestionList(pydantic_v1.BaseModel): +class PaginatedScreeningQuestionList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[ScreeningQuestion]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_sync_status_list.py b/src/merge/resources/ats/types/paginated_sync_status_list.py index 6c88197e..7faca80c 100644 --- a/src/merge/resources/ats/types/paginated_sync_status_list.py +++ b/src/merge/resources/ats/types/paginated_sync_status_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .sync_status import SyncStatus -class PaginatedSyncStatusList(pydantic_v1.BaseModel): +class PaginatedSyncStatusList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[SyncStatus]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/paginated_tag_list.py b/src/merge/resources/ats/types/paginated_tag_list.py index a41f1d61..286bc9e1 100644 --- a/src/merge/resources/ats/types/paginated_tag_list.py +++ b/src/merge/resources/ats/types/paginated_tag_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .tag import Tag -class PaginatedTagList(pydantic_v1.BaseModel): +class PaginatedTagList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Tag]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/patched_candidate_request.py b/src/merge/resources/ats/types/patched_candidate_request.py index 54191ac1..c492bb66 100644 --- a/src/merge/resources/ats/types/patched_candidate_request.py +++ b/src/merge/resources/ats/types/patched_candidate_request.py @@ -3,14 +3,15 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .email_address_request import EmailAddressRequest from .phone_number_request import PhoneNumberRequest from .url_request import UrlRequest -class PatchedCandidateRequest(pydantic_v1.BaseModel): +class PatchedCandidateRequest(UniversalBaseModel): """ # The Candidate Object @@ -23,42 +24,42 @@ class PatchedCandidateRequest(pydantic_v1.BaseModel): Fetch from the `LIST Candidates` endpoint and filter by `ID` to show all candidates. """ - first_name: typing.Optional[str] = pydantic_v1.Field() + first_name: typing.Optional[str] = pydantic.Field() """ The candidate's first name. """ - last_name: typing.Optional[str] = pydantic_v1.Field() + last_name: typing.Optional[str] = pydantic.Field() """ The candidate's last name. """ - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The candidate's current company. """ - title: typing.Optional[str] = pydantic_v1.Field() + title: typing.Optional[str] = pydantic.Field() """ The candidate's current title. """ - last_interaction_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + last_interaction_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the most recent interaction with the candidate occurred. """ - is_private: typing.Optional[bool] = pydantic_v1.Field() + is_private: typing.Optional[bool] = pydantic.Field() """ Whether or not the candidate is private. 
""" - can_email: typing.Optional[bool] = pydantic_v1.Field() + can_email: typing.Optional[bool] = pydantic.Field() """ Whether or not the candidate can be emailed. """ - locations: typing.Optional[typing.List[typing.Optional[str]]] = pydantic_v1.Field() + locations: typing.Optional[typing.List[typing.Optional[str]]] = pydantic.Field() """ The candidate's locations. """ @@ -66,17 +67,17 @@ class PatchedCandidateRequest(pydantic_v1.BaseModel): phone_numbers: typing.Optional[typing.List[PhoneNumberRequest]] email_addresses: typing.Optional[typing.List[EmailAddressRequest]] urls: typing.Optional[typing.List[UrlRequest]] - tags: typing.Optional[typing.List[typing.Optional[str]]] = pydantic_v1.Field() + tags: typing.Optional[typing.List[typing.Optional[str]]] = pydantic.Field() """ Array of `Tag` names as strings. """ - applications: typing.Optional[typing.List[typing.Optional[str]]] = pydantic_v1.Field() + applications: typing.Optional[typing.List[typing.Optional[str]]] = pydantic.Field() """ Array of `Application` object IDs. """ - attachments: typing.Optional[typing.List[typing.Optional[str]]] = pydantic_v1.Field() + attachments: typing.Optional[typing.List[typing.Optional[str]]] = pydantic.Field() """ Array of `Attachment` object IDs. """ @@ -85,20 +86,11 @@ class PatchedCandidateRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/phone_number.py b/src/merge/resources/ats/types/phone_number.py index 46d2b093..1eb5ae07 100644 --- a/src/merge/resources/ats/types/phone_number.py +++ b/src/merge/resources/ats/types/phone_number.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .phone_number_phone_number_type import PhoneNumberPhoneNumberType -class PhoneNumber(pydantic_v1.BaseModel): +class PhoneNumber(UniversalBaseModel): """ # The PhoneNumber Object @@ -21,22 +22,22 @@ class PhoneNumber(pydantic_v1.BaseModel): Fetch from the `GET Candidate` endpoint and view their phone numbers. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. 
""" - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - value: typing.Optional[str] = pydantic_v1.Field() + value: typing.Optional[str] = pydantic.Field() """ The phone number. """ - phone_number_type: typing.Optional[PhoneNumberPhoneNumberType] = pydantic_v1.Field() + phone_number_type: typing.Optional[PhoneNumberPhoneNumberType] = pydantic.Field() """ The type of phone number. @@ -47,20 +48,11 @@ class PhoneNumber(pydantic_v1.BaseModel): - `OTHER` - OTHER """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/phone_number_request.py b/src/merge/resources/ats/types/phone_number_request.py index 498e5d6a..ec3936fa 100644 --- a/src/merge/resources/ats/types/phone_number_request.py +++ b/src/merge/resources/ats/types/phone_number_request.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .phone_number_request_phone_number_type import PhoneNumberRequestPhoneNumberType -class PhoneNumberRequest(pydantic_v1.BaseModel): +class PhoneNumberRequest(UniversalBaseModel): """ # The PhoneNumber Object @@ -21,12 +21,12 @@ class PhoneNumberRequest(pydantic_v1.BaseModel): Fetch from the `GET Candidate` endpoint and view their phone numbers. """ - value: typing.Optional[str] = pydantic_v1.Field() + value: typing.Optional[str] = pydantic.Field() """ The phone number. """ - phone_number_type: typing.Optional[PhoneNumberRequestPhoneNumberType] = pydantic_v1.Field() + phone_number_type: typing.Optional[PhoneNumberRequestPhoneNumberType] = pydantic.Field() """ The type of phone number. 
@@ -40,20 +40,11 @@ class PhoneNumberRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/reject_reason.py b/src/merge/resources/ats/types/reject_reason.py index ed4154ce..8561cac5 100644 --- a/src/merge/resources/ats/types/reject_reason.py +++ b/src/merge/resources/ats/types/reject_reason.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData -class RejectReason(pydantic_v1.BaseModel): +class RejectReason(UniversalBaseModel): """ # The RejectReason Object @@ -22,27 +23,27 @@ class RejectReason(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The rejection reason’s name. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -50,20 +51,11 @@ class RejectReason(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/remote_data.py b/src/merge/resources/ats/types/remote_data.py index 098f551b..d50bfca2 100644 --- a/src/merge/resources/ats/types/remote_data.py +++ b/src/merge/resources/ats/types/remote_data.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class RemoteData(pydantic_v1.BaseModel): + +class RemoteData(UniversalBaseModel): path: str data: typing.Optional[typing.Any] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/remote_endpoint_info.py b/src/merge/resources/ats/types/remote_endpoint_info.py index da6037bc..9f627cae 100644 --- a/src/merge/resources/ats/types/remote_endpoint_info.py +++ b/src/merge/resources/ats/types/remote_endpoint_info.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt
 import typing
 
-from ....core.datetime_utils import serialize_datetime
-from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
+import pydantic
+from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
 
-class RemoteEndpointInfo(pydantic_v1.BaseModel):
+
+class RemoteEndpointInfo(UniversalBaseModel):
     method: str
     url_path: str
     field_traversal_path: typing.List[typing.Any]
 
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
-
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
 
-    class Config:
-        frozen = True
-        smart_union = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/merge/resources/ats/types/remote_field_api.py b/src/merge/resources/ats/types/remote_field_api.py
index c2a16698..1d1efb6a 100644
--- a/src/merge/resources/ats/types/remote_field_api.py
+++ b/src/merge/resources/ats/types/remote_field_api.py
@@ -1,39 +1,28 @@
 # This file was auto-generated by Fern from our API Definition.
 
-import datetime as dt
 import typing
 
-from ....core.datetime_utils import serialize_datetime
-from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
+import pydantic
+
+from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
 from .advanced_metadata import AdvancedMetadata
 from .remote_endpoint_info import RemoteEndpointInfo
 from .remote_field_api_coverage import RemoteFieldApiCoverage
 
 
-class RemoteFieldApi(pydantic_v1.BaseModel):
-    schema_: typing.Dict[str, typing.Any] = pydantic_v1.Field(alias="schema")
+class RemoteFieldApi(UniversalBaseModel):
+    schema_: typing.Dict[str, typing.Any] = pydantic.Field(alias="schema")
     remote_key_name: str
     remote_endpoint_info: RemoteEndpointInfo
     example_values: typing.List[typing.Any]
     advanced_metadata: typing.Optional[AdvancedMetadata]
     coverage: typing.Optional[RemoteFieldApiCoverage]
 
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
-
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
 
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-        populate_by_name = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/merge/resources/ats/types/remote_field_api_response.py b/src/merge/resources/ats/types/remote_field_api_response.py
index ffe4f236..a133f3b8 100644
--- a/src/merge/resources/ats/types/remote_field_api_response.py
+++ b/src/merge/resources/ats/types/remote_field_api_response.py
@@ -1,47 +1,36 @@
 # This file was auto-generated by Fern from our API Definition.
 
-import datetime as dt
 import typing
 
-from ....core.datetime_utils import serialize_datetime
-from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
-from .remote_field_api import RemoteFieldApi
-
-
-class RemoteFieldApiResponse(pydantic_v1.BaseModel):
-    activity: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Activity")
-    application: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Application")
-    attachment: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Attachment")
-    candidate: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Candidate")
-    department: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Department")
-    eeoc: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="EEOC")
-    scheduled_interview: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="ScheduledInterview")
-    job: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Job")
-    job_posting: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="JobPosting")
-    job_interview_stage: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="JobInterviewStage")
-    offer: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Offer")
-    office: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Office")
-    reject_reason: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="RejectReason")
-    scorecard: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Scorecard")
-    tag: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Tag")
-    remote_user: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="RemoteUser")
+import pydantic
 
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .remote_field_api import RemoteFieldApi
 
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
 
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-        populate_by_name = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
+class RemoteFieldApiResponse(UniversalBaseModel):
+    activity: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Activity")
+    application: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Application")
+    attachment: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Attachment")
+    candidate: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Candidate")
+    department: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Department")
+    eeoc: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="EEOC")
+    scheduled_interview: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="ScheduledInterview")
+    job: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Job")
+    job_posting: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="JobPosting")
+    job_interview_stage: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="JobInterviewStage")
+    offer: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Offer")
+    office: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Office")
+    reject_reason: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="RejectReason")
+    scorecard: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Scorecard")
+    tag: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Tag")
+    remote_user: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="RemoteUser")
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/merge/resources/ats/types/remote_key.py b/src/merge/resources/ats/types/remote_key.py
index e0bec368..0ce7d620 100644
--- a/src/merge/resources/ats/types/remote_key.py
+++ b/src/merge/resources/ats/types/remote_key.py
@@ -1,13 +1,13 @@
 # This file was auto-generated by Fern from our API Definition.
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class RemoteKey(pydantic_v1.BaseModel): + +class RemoteKey(UniversalBaseModel): """ # The RemoteKey Object @@ -23,20 +23,11 @@ class RemoteKey(pydantic_v1.BaseModel): name: str key: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/remote_response.py b/src/merge/resources/ats/types/remote_response.py index e7c13c23..8edcb9a8 100644 --- a/src/merge/resources/ats/types/remote_response.py +++ b/src/merge/resources/ats/types/remote_response.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_response_response_type import RemoteResponseResponseType -class RemoteResponse(pydantic_v1.BaseModel): +class RemoteResponse(UniversalBaseModel): """ # The RemoteResponse Object @@ -29,20 +29,11 @@ class RemoteResponse(pydantic_v1.BaseModel): response_type: typing.Optional[RemoteResponseResponseType] headers: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/remote_user.py b/src/merge/resources/ats/types/remote_user.py index 63530604..40844431 100644 --- a/src/merge/resources/ats/types/remote_user.py +++ b/src/merge/resources/ats/types/remote_user.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData from .remote_user_access_role import RemoteUserAccessRole -class RemoteUser(pydantic_v1.BaseModel): +class RemoteUser(UniversalBaseModel): """ # The RemoteUser Object @@ -23,47 +24,47 @@ class RemoteUser(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - first_name: typing.Optional[str] = pydantic_v1.Field() + first_name: typing.Optional[str] = pydantic.Field() """ The user's first name. """ - last_name: typing.Optional[str] = pydantic_v1.Field() + last_name: typing.Optional[str] = pydantic.Field() """ The user's last name. """ - email: typing.Optional[str] = pydantic_v1.Field() + email: typing.Optional[str] = pydantic.Field() """ The user's email. """ - disabled: typing.Optional[bool] = pydantic_v1.Field() + disabled: typing.Optional[bool] = pydantic.Field() """ Whether the user's account had been disabled. 
""" - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's user was created. """ - access_role: typing.Optional[RemoteUserAccessRole] = pydantic_v1.Field() + access_role: typing.Optional[RemoteUserAccessRole] = pydantic.Field() """ The user's role. @@ -74,7 +75,7 @@ class RemoteUser(pydantic_v1.BaseModel): - `INTERVIEWER` - INTERVIEWER """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -82,20 +83,11 @@ class RemoteUser(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/scheduled_interview.py b/src/merge/resources/ats/types/scheduled_interview.py index 05bd2b9c..034704e7 100644 --- a/src/merge/resources/ats/types/scheduled_interview.py +++ b/src/merge/resources/ats/types/scheduled_interview.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData from .scheduled_interview_application import ScheduledInterviewApplication from .scheduled_interview_interviewers_item import ScheduledInterviewInterviewersItem @@ -13,7 +14,7 @@ from .scheduled_interview_status import ScheduledInterviewStatus -class ScheduledInterview(pydantic_v1.BaseModel): +class ScheduledInterview(UniversalBaseModel): """ # The ScheduledInterview Object @@ -27,69 +28,67 @@ class ScheduledInterview(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. 
""" - application: typing.Optional[ScheduledInterviewApplication] = pydantic_v1.Field() + application: typing.Optional[ScheduledInterviewApplication] = pydantic.Field() """ The application being interviewed. """ - job_interview_stage: typing.Optional[ScheduledInterviewJobInterviewStage] = pydantic_v1.Field() + job_interview_stage: typing.Optional[ScheduledInterviewJobInterviewStage] = pydantic.Field() """ The stage of the interview. """ - organizer: typing.Optional[ScheduledInterviewOrganizer] = pydantic_v1.Field() + organizer: typing.Optional[ScheduledInterviewOrganizer] = pydantic.Field() """ The user organizing the interview. """ - interviewers: typing.Optional[ - typing.List[typing.Optional[ScheduledInterviewInterviewersItem]] - ] = pydantic_v1.Field() + interviewers: typing.Optional[typing.List[typing.Optional[ScheduledInterviewInterviewersItem]]] = pydantic.Field() """ Array of `RemoteUser` IDs. """ - location: typing.Optional[str] = pydantic_v1.Field() + location: typing.Optional[str] = pydantic.Field() """ The interview's location. """ - start_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + start_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the interview was started. """ - end_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + end_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the interview was ended. """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's interview was created. """ - remote_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_updated_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's interview was updated. """ - status: typing.Optional[ScheduledInterviewStatus] = pydantic_v1.Field() + status: typing.Optional[ScheduledInterviewStatus] = pydantic.Field() """ The interview's status. @@ -98,7 +97,7 @@ class ScheduledInterview(pydantic_v1.BaseModel): - `COMPLETE` - COMPLETE """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -106,20 +105,11 @@ class ScheduledInterview(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/scheduled_interview_request.py b/src/merge/resources/ats/types/scheduled_interview_request.py index 67bc2921..83c74b44 100644 --- a/src/merge/resources/ats/types/scheduled_interview_request.py +++ b/src/merge/resources/ats/types/scheduled_interview_request.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .scheduled_interview_request_application import ScheduledInterviewRequestApplication from .scheduled_interview_request_interviewers_item import ScheduledInterviewRequestInterviewersItem from .scheduled_interview_request_job_interview_stage import ScheduledInterviewRequestJobInterviewStage @@ -12,7 +13,7 @@ from .scheduled_interview_request_status import ScheduledInterviewRequestStatus -class ScheduledInterviewRequest(pydantic_v1.BaseModel): +class ScheduledInterviewRequest(UniversalBaseModel): """ # The ScheduledInterview Object @@ -25,44 +26,44 @@ class ScheduledInterviewRequest(pydantic_v1.BaseModel): Fetch from the `LIST ScheduledInterviews` endpoint and filter by `interviewers` to show all office locations. """ - application: typing.Optional[ScheduledInterviewRequestApplication] = pydantic_v1.Field() + application: typing.Optional[ScheduledInterviewRequestApplication] = pydantic.Field() """ The application being interviewed. """ - job_interview_stage: typing.Optional[ScheduledInterviewRequestJobInterviewStage] = pydantic_v1.Field() + job_interview_stage: typing.Optional[ScheduledInterviewRequestJobInterviewStage] = pydantic.Field() """ The stage of the interview. """ - organizer: typing.Optional[ScheduledInterviewRequestOrganizer] = pydantic_v1.Field() + organizer: typing.Optional[ScheduledInterviewRequestOrganizer] = pydantic.Field() """ The user organizing the interview. """ interviewers: typing.Optional[ typing.List[typing.Optional[ScheduledInterviewRequestInterviewersItem]] - ] = pydantic_v1.Field() + ] = pydantic.Field() """ Array of `RemoteUser` IDs. """ - location: typing.Optional[str] = pydantic_v1.Field() + location: typing.Optional[str] = pydantic.Field() """ The interview's location. 
""" - start_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + start_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the interview was started. """ - end_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + end_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the interview was ended. """ - status: typing.Optional[ScheduledInterviewRequestStatus] = pydantic_v1.Field() + status: typing.Optional[ScheduledInterviewRequestStatus] = pydantic.Field() """ The interview's status. @@ -74,20 +75,11 @@ class ScheduledInterviewRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/scheduled_interview_response.py b/src/merge/resources/ats/types/scheduled_interview_response.py index 17160aca..f568d3f7 100644 --- a/src/merge/resources/ats/types/scheduled_interview_response.py +++ b/src/merge/resources/ats/types/scheduled_interview_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .scheduled_interview import ScheduledInterview from .warning_validation_problem import WarningValidationProblem -class ScheduledInterviewResponse(pydantic_v1.BaseModel): +class ScheduledInterviewResponse(UniversalBaseModel): model: ScheduledInterview warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/scorecard.py b/src/merge/resources/ats/types/scorecard.py index 9aeebadf..339093fd 100644 --- a/src/merge/resources/ats/types/scorecard.py +++ b/src/merge/resources/ats/types/scorecard.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData from .scorecard_application import ScorecardApplication from .scorecard_interview import ScorecardInterview @@ -12,7 +13,7 @@ from .scorecard_overall_recommendation import ScorecardOverallRecommendation -class Scorecard(pydantic_v1.BaseModel): +class Scorecard(UniversalBaseModel): """ # The Scorecard Object @@ -26,47 +27,47 @@ class Scorecard(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - application: typing.Optional[ScorecardApplication] = pydantic_v1.Field() + application: typing.Optional[ScorecardApplication] = pydantic.Field() """ The application being scored. 
""" - interview: typing.Optional[ScorecardInterview] = pydantic_v1.Field() + interview: typing.Optional[ScorecardInterview] = pydantic.Field() """ The interview being scored. """ - interviewer: typing.Optional[ScorecardInterviewer] = pydantic_v1.Field() + interviewer: typing.Optional[ScorecardInterviewer] = pydantic.Field() """ The interviewer doing the scoring. """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's scorecard was created. """ - submitted_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + submitted_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the scorecard was submitted. """ - overall_recommendation: typing.Optional[ScorecardOverallRecommendation] = pydantic_v1.Field() + overall_recommendation: typing.Optional[ScorecardOverallRecommendation] = pydantic.Field() """ The inteviewer's recommendation. @@ -77,7 +78,7 @@ class Scorecard(pydantic_v1.BaseModel): - `NO_DECISION` - NO_DECISION """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -85,20 +86,11 @@ class Scorecard(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/screening_question.py b/src/merge/resources/ats/types/screening_question.py index e2928fda..3e239f0a 100644 --- a/src/merge/resources/ats/types/screening_question.py +++ b/src/merge/resources/ats/types/screening_question.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .screening_question_job import ScreeningQuestionJob from .screening_question_type import ScreeningQuestionType -class ScreeningQuestion(pydantic_v1.BaseModel): +class ScreeningQuestion(UniversalBaseModel): """ # The ScreeningQuestion Object @@ -23,37 +24,37 @@ class ScreeningQuestion(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. 
""" - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - job: typing.Optional[ScreeningQuestionJob] = pydantic_v1.Field() + job: typing.Optional[ScreeningQuestionJob] = pydantic.Field() """ The job associated with the screening question. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The description of the screening question """ - title: typing.Optional[str] = pydantic_v1.Field() + title: typing.Optional[str] = pydantic.Field() """ The title of the screening question """ - type: typing.Optional[ScreeningQuestionType] = pydantic_v1.Field() + type: typing.Optional[ScreeningQuestionType] = pydantic.Field() """ The data type for the screening question. @@ -67,27 +68,18 @@ class ScreeningQuestion(pydantic_v1.BaseModel): - `BOOLEAN` - BOOLEAN """ - required: typing.Optional[bool] = pydantic_v1.Field() + required: typing.Optional[bool] = pydantic.Field() """ Whether or not the screening question is required. """ options: typing.Optional[typing.List[typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/screening_question_option.py b/src/merge/resources/ats/types/screening_question_option.py index 7d834fb8..4d6a57ee 100644 --- a/src/merge/resources/ats/types/screening_question_option.py +++ b/src/merge/resources/ats/types/screening_question_option.py @@ -3,11 +3,12 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ScreeningQuestionOption(pydantic_v1.BaseModel): + +class ScreeningQuestionOption(UniversalBaseModel): """ # The ScreeningQuestionOption Object @@ -21,40 +22,31 @@ class ScreeningQuestionOption(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. 
""" - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - label: typing.Optional[str] = pydantic_v1.Field() + label: typing.Optional[str] = pydantic.Field() """ Available response options """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/sync_status.py b/src/merge/resources/ats/types/sync_status.py index c6b7cbc4..03668cbf 100644 --- a/src/merge/resources/ats/types/sync_status.py +++ b/src/merge/resources/ats/types/sync_status.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .selective_sync_configurations_usage_enum import SelectiveSyncConfigurationsUsageEnum from .sync_status_status_enum import SyncStatusStatusEnum -class SyncStatus(pydantic_v1.BaseModel): +class SyncStatus(UniversalBaseModel): """ # The SyncStatus Object @@ -30,20 +31,11 @@ class SyncStatus(pydantic_v1.BaseModel): is_initial_sync: bool selective_sync_configurations_usage: typing.Optional[SelectiveSyncConfigurationsUsageEnum] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/tag.py b/src/merge/resources/ats/types/tag.py index ed81b764..bae820c2 100644 --- a/src/merge/resources/ats/types/tag.py +++ b/src/merge/resources/ats/types/tag.py @@ -3,11 +3,12 @@ import datetime as dt import typing -from 
....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class Tag(pydantic_v1.BaseModel): + +class Tag(UniversalBaseModel): """ # The Tag Object @@ -20,27 +21,27 @@ class Tag(pydantic_v1.BaseModel): Fetch from the `LIST Tags` endpoint and view the tags used within a company. """ - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The tag's name. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -48,20 +49,11 @@ class Tag(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[typing.Optional[typing.Dict[str, typing.Any]]]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/url.py b/src/merge/resources/ats/types/url.py index 0c6bea47..7e0e6f6c 100644 --- a/src/merge/resources/ats/types/url.py +++ b/src/merge/resources/ats/types/url.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .url_url_type import UrlUrlType -class Url(pydantic_v1.BaseModel): +class Url(UniversalBaseModel): """ # The Url Object @@ -21,22 +22,22 @@ class Url(pydantic_v1.BaseModel): Fetch from the `GET Candidate` endpoint and view their website urls. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. 
""" - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - value: typing.Optional[str] = pydantic_v1.Field() + value: typing.Optional[str] = pydantic.Field() """ The site's url. """ - url_type: typing.Optional[UrlUrlType] = pydantic_v1.Field() + url_type: typing.Optional[UrlUrlType] = pydantic.Field() """ The type of site. @@ -49,20 +50,11 @@ class Url(pydantic_v1.BaseModel): - `JOB_POSTING` - JOB_POSTING """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/url_request.py b/src/merge/resources/ats/types/url_request.py index 3eebb3f7..a064c9f2 100644 --- a/src/merge/resources/ats/types/url_request.py +++ b/src/merge/resources/ats/types/url_request.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .url_request_url_type import UrlRequestUrlType -class UrlRequest(pydantic_v1.BaseModel): +class UrlRequest(UniversalBaseModel): """ # The Url Object @@ -21,12 +21,12 @@ class UrlRequest(pydantic_v1.BaseModel): Fetch from the `GET Candidate` endpoint and view their website urls. """ - value: typing.Optional[str] = pydantic_v1.Field() + value: typing.Optional[str] = pydantic.Field() """ The site's url. """ - url_type: typing.Optional[UrlRequestUrlType] = pydantic_v1.Field() + url_type: typing.Optional[UrlRequestUrlType] = pydantic.Field() """ The type of site. 
@@ -42,20 +42,11 @@ class UrlRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/validation_problem_source.py b/src/merge/resources/ats/types/validation_problem_source.py index fde15b40..c65d82ef 100644 --- a/src/merge/resources/ats/types/validation_problem_source.py +++ b/src/merge/resources/ats/types/validation_problem_source.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ValidationProblemSource(pydantic_v1.BaseModel): - pointer: str - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class ValidationProblemSource(UniversalBaseModel): + pointer: str - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/warning_validation_problem.py b/src/merge/resources/ats/types/warning_validation_problem.py index 6baf9600..348d668a 100644 --- a/src/merge/resources/ats/types/warning_validation_problem.py +++ b/src/merge/resources/ats/types/warning_validation_problem.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .validation_problem_source import ValidationProblemSource -class WarningValidationProblem(pydantic_v1.BaseModel): +class WarningValidationProblem(UniversalBaseModel): source: typing.Optional[ValidationProblemSource] title: str detail: str problem_type: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ats/types/webhook_receiver.py b/src/merge/resources/ats/types/webhook_receiver.py index 0544f256..bb10af95 100644 --- a/src/merge/resources/ats/types/webhook_receiver.py +++ b/src/merge/resources/ats/types/webhook_receiver.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class WebhookReceiver(pydantic_v1.BaseModel): + +class WebhookReceiver(UniversalBaseModel): event: str is_active: bool key: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/resources/account_details/client.py b/src/merge/resources/crm/resources/account_details/client.py index b892c7ad..97e5f466 100644 --- a/src/merge/resources/crm/resources/account_details/client.py +++ b/src/merge/resources/crm/resources/account_details/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.account_details import AccountDetails @@ -41,9 +41,9 @@ def retrieve(self, *, request_options: typing.Optional[RequestOptions] = None) - _response = self._client_wrapper.httpx_client.request( "crm/v1/account-details", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountDetails, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountDetails, parse_obj_as(type_=AccountDetails, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -70,20 +70,28 @@ async def retrieve(self, *, request_options: typing.Optional[RequestOptions] = N Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.account_details.retrieve() + + + async def main() -> None: + await client.crm.account_details.retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/account-details", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountDetails, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountDetails, parse_obj_as(type_=AccountDetails, object_=_response.json())) # type: ignore _response_json = _response.json() 
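In the client hunks from here on, pydantic_v1.parse_obj_as(Model, data) becomes the keyword call parse_obj_as(type_=Model, object_=data) imported from the SDK's core.pydantic_utilities, wrapped in typing.cast for the type checker. The helper's implementation is not part of this patch; the following is only a plausible, version-agnostic stand-in (names and branches are assumptions) that is compatible with the call sites shown in these hunks.

import typing

import pydantic

IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")  # assumed flag, as in the earlier sketch

T = typing.TypeVar("T")


def parse_obj_as(type_: typing.Type[T], object_: typing.Any) -> T:
    # Hypothetical stand-in for merge.core.pydantic_utilities.parse_obj_as.
    if IS_PYDANTIC_V2:
        return pydantic.TypeAdapter(type_).validate_python(object_)  # Pydantic v2 path
    return pydantic.parse_obj_as(type_, object_)  # Pydantic v1 path


class AccountDetails(pydantic.BaseModel):
    # Reduced stand-in for the SDK's AccountDetails model.
    id: typing.Optional[str] = None


details = typing.cast(AccountDetails, parse_obj_as(type_=AccountDetails, object_={"id": "abc-123"}))
print(details.id)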
except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/account_token/client.py b/src/merge/resources/crm/resources/account_token/client.py index 9fd7ad61..c8a9615a 100644 --- a/src/merge/resources/crm/resources/account_token/client.py +++ b/src/merge/resources/crm/resources/account_token/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.account_token import AccountToken @@ -46,9 +46,9 @@ def retrieve(self, public_token: str, *, request_options: typing.Optional[Reques _response = self._client_wrapper.httpx_client.request( f"crm/v1/account-token/{jsonable_encoder(public_token)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountToken, parse_obj_as(type_=AccountToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -79,22 +79,30 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.account_token.retrieve( - public_token="public_token", - ) + + + async def main() -> None: + await client.crm.account_token.retrieve( + public_token="public_token", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/account-token/{jsonable_encoder(public_token)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountToken, parse_obj_as(type_=AccountToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/accounts/client.py b/src/merge/resources/crm/resources/accounts/client.py index 878a699e..0246f518 100644 --- a/src/merge/resources/crm/resources/accounts/client.py +++ b/src/merge/resources/crm/resources/accounts/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.account import Account from ...types.account_request import AccountRequest @@ -126,9 +126,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountList, parse_obj_as(type_=PaginatedAccountList, object_=_response.json())) # type: ignore _response_json = 
_response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -184,9 +184,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CrmAccountResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CrmAccountResponse, parse_obj_as(type_=CrmAccountResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -247,9 +247,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Account, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Account, parse_obj_as(type_=Account, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -309,9 +309,9 @@ def partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CrmAccountResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CrmAccountResponse, parse_obj_as(type_=CrmAccountResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -348,9 +348,9 @@ def meta_patch_retrieve(self, id: str, *, request_options: typing.Optional[Reque _response = self._client_wrapper.httpx_client.request( f"crm/v1/accounts/meta/patch/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -383,9 +383,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "crm/v1/accounts/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -451,9 +451,9 @@ def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -536,13 +536,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge 
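A second, easy-to-miss change in these client hunks is control flow: the 2xx parse used to run before the try block, so a success response with a malformed body raised a bare JSONDecodeError; it now runs inside the try, so that case surfaces as ApiError like every other failure. A small sketch of the new flow, independent of the SDK (ApiError is stubbed here for illustration):

from json import JSONDecodeError, loads


class ApiError(Exception):
    # Stand-in for merge.core.api_error.ApiError.
    def __init__(self, status_code: int, body: object) -> None:
        super().__init__(f"status_code: {status_code}, body: {body}")
        self.status_code = status_code
        self.body = body


def handle(status_code: int, text: str) -> object:
    try:
        if 200 <= status_code < 300:
            return loads(text)  # success parsing now happens inside the try
        response_json = loads(text)
    except JSONDecodeError:
        raise ApiError(status_code=status_code, body=text)
    raise ApiError(status_code=status_code, body=response_json)


print(handle(200, '{"id": "abc"}'))  # {'id': 'abc'}; handle(200, "not json") would raise ApiError instead of JSONDecodeError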
client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.accounts.list() + + + async def main() -> None: + await client.crm.accounts.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/accounts", @@ -564,9 +572,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountList, parse_obj_as(type_=PaginatedAccountList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -603,6 +611,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import AccountRequest @@ -610,9 +620,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.accounts.create( - model=AccountRequest(), - ) + + + async def main() -> None: + await client.crm.accounts.create( + model=AccountRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/accounts", @@ -622,9 +638,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CrmAccountResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CrmAccountResponse, parse_obj_as(type_=CrmAccountResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -665,15 +681,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.accounts.retrieve( - id="id", - ) + + + async def main() -> None: + await client.crm.accounts.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/accounts/{jsonable_encoder(id)}", @@ -685,9 +709,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Account, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Account, parse_obj_as(type_=Account, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -727,6 +751,8 @@ async def partial_update( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import PatchedAccountRequest @@ -734,10 +760,16 @@ async def partial_update( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.accounts.partial_update( - id="id", - model=PatchedAccountRequest(), - ) + + + async def main() -> None: + await client.crm.accounts.partial_update( + id="id", + model=PatchedAccountRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/accounts/{jsonable_encoder(id)}", @@ -747,9 +779,9 @@ async def partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - 
return pydantic_v1.parse_obj_as(CrmAccountResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CrmAccountResponse, parse_obj_as(type_=CrmAccountResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -775,22 +807,30 @@ async def meta_patch_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.accounts.meta_patch_retrieve( - id="id", - ) + + + async def main() -> None: + await client.crm.accounts.meta_patch_retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/accounts/meta/patch/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -812,20 +852,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.accounts.meta_post_retrieve() + + + async def main() -> None: + await client.crm.accounts.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/accounts/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -871,13 +919,21 @@ async def remote_field_classes_list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.accounts.remote_field_classes_list() + + + async def main() -> None: + await client.crm.accounts.remote_field_classes_list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/accounts/remote-field-classes", @@ -891,9 +947,9 @@ async def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/association_types/client.py b/src/merge/resources/crm/resources/association_types/client.py index f8f84cc5..2e600860 100644 --- 
a/src/merge/resources/crm/resources/association_types/client.py +++ b/src/merge/resources/crm/resources/association_types/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.association_type import AssociationType from ...types.association_type_request_request import AssociationTypeRequestRequest @@ -114,9 +114,9 @@ def custom_object_classes_association_types_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAssociationTypeList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAssociationTypeList, parse_obj_as(type_=PaginatedAssociationTypeList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -192,9 +192,9 @@ def custom_object_classes_association_types_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CrmAssociationTypeResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CrmAssociationTypeResponse, parse_obj_as(type_=CrmAssociationTypeResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -251,9 +251,9 @@ def custom_object_classes_association_types_retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AssociationType, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AssociationType, parse_obj_as(type_=AssociationType, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -294,9 +294,9 @@ def custom_object_classes_association_types_meta_post_retrieve( method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -370,15 +370,23 @@ async def custom_object_classes_association_types_list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.association_types.custom_object_classes_association_types_list( - custom_object_class_id="custom_object_class_id", - ) + + + async def main() -> None: + await client.crm.association_types.custom_object_classes_association_types_list( + custom_object_class_id="custom_object_class_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( 
f"crm/v1/custom-object-classes/{jsonable_encoder(custom_object_class_id)}/association-types", @@ -397,9 +405,9 @@ async def custom_object_classes_association_types_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAssociationTypeList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAssociationTypeList, parse_obj_as(type_=PaginatedAssociationTypeList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -439,6 +447,8 @@ async def custom_object_classes_association_types_create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import ( AssociationTypeRequestRequest, @@ -450,22 +460,28 @@ async def custom_object_classes_association_types_create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.association_types.custom_object_classes_association_types_create( - custom_object_class_id="custom_object_class_id", - model=AssociationTypeRequestRequest( - source_object_class=ObjectClassDescriptionRequest( - id="id", - origin_type=OriginTypeEnum.CUSTOM_OBJECT, - ), - target_object_classes=[ - ObjectClassDescriptionRequest( + + + async def main() -> None: + await client.crm.association_types.custom_object_classes_association_types_create( + custom_object_class_id="custom_object_class_id", + model=AssociationTypeRequestRequest( + source_object_class=ObjectClassDescriptionRequest( id="id", origin_type=OriginTypeEnum.CUSTOM_OBJECT, - ) - ], - remote_key_name="remote_key_name", - ), - ) + ), + target_object_classes=[ + ObjectClassDescriptionRequest( + id="id", + origin_type=OriginTypeEnum.CUSTOM_OBJECT, + ) + ], + remote_key_name="remote_key_name", + ), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/custom-object-classes/{jsonable_encoder(custom_object_class_id)}/association-types", @@ -475,9 +491,9 @@ async def custom_object_classes_association_types_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CrmAssociationTypeResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CrmAssociationTypeResponse, parse_obj_as(type_=CrmAssociationTypeResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -517,16 +533,24 @@ async def custom_object_classes_association_types_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.association_types.custom_object_classes_association_types_retrieve( - custom_object_class_id="custom_object_class_id", - id="id", - ) + + + async def main() -> None: + await client.crm.association_types.custom_object_classes_association_types_retrieve( + custom_object_class_id="custom_object_class_id", + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/custom-object-classes/{jsonable_encoder(custom_object_class_id)}/association-types/{jsonable_encoder(id)}", @@ -534,9 +558,9 @@ async def custom_object_classes_association_types_retrieve( params={"expand": 
expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AssociationType, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AssociationType, parse_obj_as(type_=AssociationType, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -562,24 +586,32 @@ async def custom_object_classes_association_types_meta_post_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.association_types.custom_object_classes_association_types_meta_post_retrieve( - custom_object_class_id="custom_object_class_id", - ) + + + async def main() -> None: + await client.crm.association_types.custom_object_classes_association_types_meta_post_retrieve( + custom_object_class_id="custom_object_class_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/custom-object-classes/{jsonable_encoder(custom_object_class_id)}/association-types/meta/post", method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/associations/client.py b/src/merge/resources/crm/resources/associations/client.py index 490e2db9..105e3778 100644 --- a/src/merge/resources/crm/resources/associations/client.py +++ b/src/merge/resources/crm/resources/associations/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.association import Association from ...types.paginated_association_list import PaginatedAssociationList @@ -117,9 +117,9 @@ def custom_object_classes_custom_objects_associations_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAssociationList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAssociationList, parse_obj_as(type_=PaginatedAssociationList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -188,9 +188,9 @@ def custom_object_classes_custom_objects_associations_update( params={"is_debug_mode": is_debug_mode, "run_async": run_async}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Association, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Association, parse_obj_as(type_=Association, object_=_response.json())) # type: ignore _response_json = 
_response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -271,16 +271,24 @@ async def custom_object_classes_custom_objects_associations_list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.associations.custom_object_classes_custom_objects_associations_list( - custom_object_class_id="custom_object_class_id", - object_id="object_id", - ) + + + async def main() -> None: + await client.crm.associations.custom_object_classes_custom_objects_associations_list( + custom_object_class_id="custom_object_class_id", + object_id="object_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/custom-object-classes/{jsonable_encoder(custom_object_class_id)}/custom-objects/{jsonable_encoder(object_id)}/associations", @@ -300,9 +308,9 @@ async def custom_object_classes_custom_objects_associations_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAssociationList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAssociationList, parse_obj_as(type_=PaginatedAssociationList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -351,19 +359,27 @@ async def custom_object_classes_custom_objects_associations_update( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.associations.custom_object_classes_custom_objects_associations_update( - association_type_id="association_type_id", - source_class_id="source_class_id", - source_object_id="source_object_id", - target_class_id="target_class_id", - target_object_id="target_object_id", - ) + + + async def main() -> None: + await client.crm.associations.custom_object_classes_custom_objects_associations_update( + association_type_id="association_type_id", + source_class_id="source_class_id", + source_object_id="source_object_id", + target_class_id="target_class_id", + target_object_id="target_object_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/custom-object-classes/{jsonable_encoder(source_class_id)}/custom-objects/{jsonable_encoder(source_object_id)}/associations/{jsonable_encoder(target_class_id)}/{jsonable_encoder(target_object_id)}/{jsonable_encoder(association_type_id)}", @@ -371,9 +387,9 @@ async def custom_object_classes_custom_objects_associations_update( params={"is_debug_mode": is_debug_mode, "run_async": run_async}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Association, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Association, parse_obj_as(type_=Association, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/async_passthrough/client.py b/src/merge/resources/crm/resources/async_passthrough/client.py index ed6e9adf..1c7ec721 100644 --- a/src/merge/resources/crm/resources/async_passthrough/client.py +++ 
b/src/merge/resources/crm/resources/async_passthrough/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.async_passthrough_reciept import AsyncPassthroughReciept from ...types.data_passthrough_request import DataPassthroughRequest @@ -57,9 +57,9 @@ def create( _response = self._client_wrapper.httpx_client.request( "crm/v1/async-passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AsyncPassthroughReciept, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AsyncPassthroughReciept, parse_obj_as(type_=AsyncPassthroughReciept, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -100,9 +100,9 @@ def retrieve( method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -133,6 +133,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import DataPassthroughRequest, MethodEnum @@ -140,19 +142,25 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.async_passthrough.create( - request=DataPassthroughRequest( - method=MethodEnum.GET, - path="/scooters", - ), - ) + + + async def main() -> None: + await client.crm.async_passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/async-passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AsyncPassthroughReciept, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AsyncPassthroughReciept, parse_obj_as(type_=AsyncPassthroughReciept, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -178,24 +186,32 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.async_passthrough.retrieve( - async_passthrough_receipt_id="async_passthrough_receipt_id", - ) + + + async def main() -> None: + await client.crm.async_passthrough.retrieve( + async_passthrough_receipt_id="async_passthrough_receipt_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/async-passthrough/{jsonable_encoder(async_passthrough_receipt_id)}", method="GET", 
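[Editor's note, not part of the patch] The other repeated change in these hunks rewrites the async docstring examples: instead of a bare top-level await (which only works in a REPL or notebook), the call is wrapped in async def main() and driven with asyncio.run(main()), so the example runs as a plain script. A minimal, self-contained illustration of that shape, using a stand-in coroutine rather than the real AsyncMerge client:

import asyncio


async def retrieve(async_passthrough_receipt_id: str) -> dict:
    # Stand-in for an SDK call such as client.crm.async_passthrough.retrieve(...).
    await asyncio.sleep(0)
    return {"id": async_passthrough_receipt_id, "status": "pending"}


async def main() -> None:
    receipt = await retrieve("async_passthrough_receipt_id")
    print(receipt)


asyncio.run(main())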
request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/audit_trail/client.py b/src/merge/resources/crm/resources/audit_trail/client.py index 01353b04..f4dcd65d 100644 --- a/src/merge/resources/crm/resources/audit_trail/client.py +++ b/src/merge/resources/crm/resources/audit_trail/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_audit_log_event_list import PaginatedAuditLogEventList @@ -79,9 +79,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAuditLogEventList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAuditLogEventList, parse_obj_as(type_=PaginatedAuditLogEventList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -136,13 +136,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.audit_trail.list() + + + async def main() -> None: + await client.crm.audit_trail.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/audit-trail", @@ -157,9 +165,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAuditLogEventList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAuditLogEventList, parse_obj_as(type_=PaginatedAuditLogEventList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/available_actions/client.py b/src/merge/resources/crm/resources/available_actions/client.py index 4d4bb5a5..6893ede5 100644 --- a/src/merge/resources/crm/resources/available_actions/client.py +++ b/src/merge/resources/crm/resources/available_actions/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.available_actions import AvailableActions @@ -41,9 +41,9 @@ def retrieve(self, *, request_options: typing.Optional[RequestOptions] = None) - _response = self._client_wrapper.httpx_client.request( "crm/v1/available-actions", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AvailableActions, _response.json()) 
# type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AvailableActions, parse_obj_as(type_=AvailableActions, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -70,20 +70,28 @@ async def retrieve(self, *, request_options: typing.Optional[RequestOptions] = N Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.available_actions.retrieve() + + + async def main() -> None: + await client.crm.available_actions.retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/available-actions", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AvailableActions, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AvailableActions, parse_obj_as(type_=AvailableActions, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/contacts/client.py b/src/merge/resources/crm/resources/contacts/client.py index c9cf539d..0cf5dcc3 100644 --- a/src/merge/resources/crm/resources/contacts/client.py +++ b/src/merge/resources/crm/resources/contacts/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.contact import Contact from ...types.contact_request import ContactRequest @@ -134,9 +134,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedContactList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedContactList, parse_obj_as(type_=PaginatedContactList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -192,9 +192,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CrmContactResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CrmContactResponse, parse_obj_as(type_=CrmContactResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -255,9 +255,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Contact, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Contact, parse_obj_as(type_=Contact, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -317,9 +317,9 @@ def partial_update( request_options=request_options, omit=OMIT, 
) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CrmContactResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CrmContactResponse, parse_obj_as(type_=CrmContactResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -371,9 +371,9 @@ def ignore_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -410,9 +410,9 @@ def meta_patch_retrieve(self, id: str, *, request_options: typing.Optional[Reque _response = self._client_wrapper.httpx_client.request( f"crm/v1/contacts/meta/patch/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -445,9 +445,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "crm/v1/contacts/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -513,9 +513,9 @@ def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -602,13 +602,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.contacts.list() + + + async def main() -> None: + await client.crm.contacts.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/contacts", @@ -631,9 +639,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedContactList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedContactList, parse_obj_as(type_=PaginatedContactList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -670,6 +678,8 @@ async def create( 
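[Editor's note, not part of the patch] A note on the typing.cast wrapper that now surrounds every parsed return value above: cast performs no conversion or validation at runtime (parse_obj_as does the validating); it only relabels the static type for the type checker. A tiny illustration:

import typing


def relabel(value: object) -> int:
    # typing.cast is a runtime no-op: it returns its argument unchanged and
    # merely tells static type checkers to treat it as an int.
    return typing.cast(int, value)


print(relabel(41) + 1)  # prints 42; no conversion or check happened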
Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import ContactRequest @@ -677,9 +687,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.contacts.create( - model=ContactRequest(), - ) + + + async def main() -> None: + await client.crm.contacts.create( + model=ContactRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/contacts", @@ -689,9 +705,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CrmContactResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CrmContactResponse, parse_obj_as(type_=CrmContactResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -732,15 +748,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.contacts.retrieve( - id="id", - ) + + + async def main() -> None: + await client.crm.contacts.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/contacts/{jsonable_encoder(id)}", @@ -752,9 +776,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Contact, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Contact, parse_obj_as(type_=Contact, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -794,6 +818,8 @@ async def partial_update( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import PatchedContactRequest @@ -801,10 +827,16 @@ async def partial_update( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.contacts.partial_update( - id="id", - model=PatchedContactRequest(), - ) + + + async def main() -> None: + await client.crm.contacts.partial_update( + id="id", + model=PatchedContactRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/contacts/{jsonable_encoder(id)}", @@ -814,9 +846,9 @@ async def partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CrmContactResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CrmContactResponse, parse_obj_as(type_=CrmContactResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -847,6 +879,8 @@ async def ignore_create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import IgnoreCommonModelRequest, ReasonEnum @@ -854,12 +888,18 @@ async def ignore_create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.contacts.ignore_create( - model_id="model_id", - request=IgnoreCommonModelRequest( - reason=ReasonEnum.GENERAL_CUSTOMER_REQUEST, - ), - ) + + + 
async def main() -> None: + await client.crm.contacts.ignore_create( + model_id="model_id", + request=IgnoreCommonModelRequest( + reason=ReasonEnum.GENERAL_CUSTOMER_REQUEST, + ), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/contacts/ignore/{jsonable_encoder(model_id)}", @@ -868,9 +908,9 @@ async def ignore_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -896,22 +936,30 @@ async def meta_patch_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.contacts.meta_patch_retrieve( - id="id", - ) + + + async def main() -> None: + await client.crm.contacts.meta_patch_retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/contacts/meta/patch/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -933,20 +981,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.contacts.meta_post_retrieve() + + + async def main() -> None: + await client.crm.contacts.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/contacts/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -992,13 +1048,21 @@ async def remote_field_classes_list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.contacts.remote_field_classes_list() + + + async def main() -> None: + await client.crm.contacts.remote_field_classes_list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/contacts/remote-field-classes", @@ -1012,9 +1076,9 @@ async def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except 
JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/custom_object_classes/client.py b/src/merge/resources/crm/resources/custom_object_classes/client.py index f85c8ef0..84d5bc54 100644 --- a/src/merge/resources/crm/resources/custom_object_classes/client.py +++ b/src/merge/resources/crm/resources/custom_object_classes/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.custom_object_class import CustomObjectClass from ...types.paginated_custom_object_class_list import PaginatedCustomObjectClassList @@ -103,9 +103,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedCustomObjectClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedCustomObjectClassList, parse_obj_as(type_=PaginatedCustomObjectClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -158,9 +158,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CustomObjectClass, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CustomObjectClass, parse_obj_as(type_=CustomObjectClass, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -231,13 +231,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.custom_object_classes.list() + + + async def main() -> None: + await client.crm.custom_object_classes.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/custom-object-classes", @@ -256,9 +264,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedCustomObjectClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedCustomObjectClassList, parse_obj_as(type_=PaginatedCustomObjectClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -295,15 +303,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.custom_object_classes.retrieve( - id="id", - ) + + + async def main() -> None: + await client.crm.custom_object_classes.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/custom-object-classes/{jsonable_encoder(id)}", @@ -311,9 +327,9 @@ async def retrieve( 
params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CustomObjectClass, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CustomObjectClass, parse_obj_as(type_=CustomObjectClass, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/custom_objects/client.py b/src/merge/resources/crm/resources/custom_objects/client.py index 752945e3..20385cf0 100644 --- a/src/merge/resources/crm/resources/custom_objects/client.py +++ b/src/merge/resources/crm/resources/custom_objects/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.crm_custom_object_response import CrmCustomObjectResponse from ...types.custom_object import CustomObject @@ -114,9 +114,9 @@ def custom_object_classes_custom_objects_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedCustomObjectList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedCustomObjectList, parse_obj_as(type_=PaginatedCustomObjectList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -178,9 +178,9 @@ def custom_object_classes_custom_objects_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CrmCustomObjectResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CrmCustomObjectResponse, parse_obj_as(type_=CrmCustomObjectResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -237,9 +237,9 @@ def custom_object_classes_custom_objects_retrieve( params={"include_remote_data": include_remote_data, "include_remote_fields": include_remote_fields}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CustomObject, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CustomObject, parse_obj_as(type_=CustomObject, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -280,9 +280,9 @@ def custom_object_classes_custom_objects_meta_post_retrieve( method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, 
body=_response.text) @@ -356,15 +356,23 @@ async def custom_object_classes_custom_objects_list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.custom_objects.custom_object_classes_custom_objects_list( - custom_object_class_id="custom_object_class_id", - ) + + + async def main() -> None: + await client.crm.custom_objects.custom_object_classes_custom_objects_list( + custom_object_class_id="custom_object_class_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/custom-object-classes/{jsonable_encoder(custom_object_class_id)}/custom-objects", @@ -383,9 +391,9 @@ async def custom_object_classes_custom_objects_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedCustomObjectList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedCustomObjectList, parse_obj_as(type_=PaginatedCustomObjectList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -425,6 +433,8 @@ async def custom_object_classes_custom_objects_create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import CustomObjectRequest @@ -432,12 +442,18 @@ async def custom_object_classes_custom_objects_create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.custom_objects.custom_object_classes_custom_objects_create( - custom_object_class_id="custom_object_class_id", - model=CustomObjectRequest( - fields={"test_field": "hello"}, - ), - ) + + + async def main() -> None: + await client.crm.custom_objects.custom_object_classes_custom_objects_create( + custom_object_class_id="custom_object_class_id", + model=CustomObjectRequest( + fields={"test_field": "hello"}, + ), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/custom-object-classes/{jsonable_encoder(custom_object_class_id)}/custom-objects", @@ -447,9 +463,9 @@ async def custom_object_classes_custom_objects_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CrmCustomObjectResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CrmCustomObjectResponse, parse_obj_as(type_=CrmCustomObjectResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -489,16 +505,24 @@ async def custom_object_classes_custom_objects_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.custom_objects.custom_object_classes_custom_objects_retrieve( - custom_object_class_id="custom_object_class_id", - id="id", - ) + + + async def main() -> None: + await client.crm.custom_objects.custom_object_classes_custom_objects_retrieve( + custom_object_class_id="custom_object_class_id", + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( 
f"crm/v1/custom-object-classes/{jsonable_encoder(custom_object_class_id)}/custom-objects/{jsonable_encoder(id)}", @@ -506,9 +530,9 @@ async def custom_object_classes_custom_objects_retrieve( params={"include_remote_data": include_remote_data, "include_remote_fields": include_remote_fields}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CustomObject, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CustomObject, parse_obj_as(type_=CustomObject, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -534,24 +558,32 @@ async def custom_object_classes_custom_objects_meta_post_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.custom_objects.custom_object_classes_custom_objects_meta_post_retrieve( - custom_object_class_id="custom_object_class_id", - ) + + + async def main() -> None: + await client.crm.custom_objects.custom_object_classes_custom_objects_meta_post_retrieve( + custom_object_class_id="custom_object_class_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/custom-object-classes/{jsonable_encoder(custom_object_class_id)}/custom-objects/meta/post", method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/delete_account/client.py b/src/merge/resources/crm/resources/delete_account/client.py index 8a6b2b96..c08ed9e4 100644 --- a/src/merge/resources/crm/resources/delete_account/client.py +++ b/src/merge/resources/crm/resources/delete_account/client.py @@ -38,9 +38,9 @@ def delete(self, *, request_options: typing.Optional[RequestOptions] = None) -> _response = self._client_wrapper.httpx_client.request( "crm/v1/delete-account", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -66,20 +66,28 @@ async def delete(self, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.delete_account.delete() + + + async def main() -> None: + await client.crm.delete_account.delete() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/delete-account", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/engagement_types/client.py 
b/src/merge/resources/crm/resources/engagement_types/client.py index f9f4fc1d..0f82a747 100644 --- a/src/merge/resources/crm/resources/engagement_types/client.py +++ b/src/merge/resources/crm/resources/engagement_types/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.engagement_type import EngagementType from ...types.paginated_engagement_type_list import PaginatedEngagementTypeList @@ -104,9 +104,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedEngagementTypeList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedEngagementTypeList, parse_obj_as(type_=PaginatedEngagementTypeList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -159,9 +159,9 @@ def retrieve( params={"include_remote_data": include_remote_data, "include_remote_fields": include_remote_fields}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(EngagementType, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(EngagementType, parse_obj_as(type_=EngagementType, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -227,9 +227,9 @@ def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -300,13 +300,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.engagement_types.list() + + + async def main() -> None: + await client.crm.engagement_types.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/engagement-types", @@ -325,9 +333,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedEngagementTypeList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedEngagementTypeList, parse_obj_as(type_=PaginatedEngagementTypeList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -364,15 +372,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await 
client.crm.engagement_types.retrieve( - id="id", - ) + + + async def main() -> None: + await client.crm.engagement_types.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/engagement-types/{jsonable_encoder(id)}", @@ -380,9 +396,9 @@ async def retrieve( params={"include_remote_data": include_remote_data, "include_remote_fields": include_remote_fields}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(EngagementType, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(EngagementType, parse_obj_as(type_=EngagementType, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -428,13 +444,21 @@ async def remote_field_classes_list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.engagement_types.remote_field_classes_list() + + + async def main() -> None: + await client.crm.engagement_types.remote_field_classes_list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/engagement-types/remote-field-classes", @@ -448,9 +472,9 @@ async def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/engagements/client.py b/src/merge/resources/crm/resources/engagements/client.py index f622eec6..79cf57a7 100644 --- a/src/merge/resources/crm/resources/engagements/client.py +++ b/src/merge/resources/crm/resources/engagements/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.engagement import Engagement from ...types.engagement_request import EngagementRequest @@ -128,9 +128,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedEngagementList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedEngagementList, parse_obj_as(type_=PaginatedEngagementList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -186,9 +186,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(EngagementResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(EngagementResponse, parse_obj_as(type_=EngagementResponse, 
object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -249,9 +249,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Engagement, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Engagement, parse_obj_as(type_=Engagement, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -311,9 +311,9 @@ def partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(EngagementResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(EngagementResponse, parse_obj_as(type_=EngagementResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -350,9 +350,9 @@ def meta_patch_retrieve(self, id: str, *, request_options: typing.Optional[Reque _response = self._client_wrapper.httpx_client.request( f"crm/v1/engagements/meta/patch/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -385,9 +385,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "crm/v1/engagements/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -453,9 +453,9 @@ def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -538,13 +538,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.engagements.list() + + + async def main() -> None: + await client.crm.engagements.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/engagements", @@ -566,9 +574,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(PaginatedEngagementList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedEngagementList, parse_obj_as(type_=PaginatedEngagementList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -605,6 +613,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import EngagementRequest @@ -612,9 +622,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.engagements.create( - model=EngagementRequest(), - ) + + + async def main() -> None: + await client.crm.engagements.create( + model=EngagementRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/engagements", @@ -624,9 +640,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(EngagementResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(EngagementResponse, parse_obj_as(type_=EngagementResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -667,15 +683,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.engagements.retrieve( - id="id", - ) + + + async def main() -> None: + await client.crm.engagements.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/engagements/{jsonable_encoder(id)}", @@ -687,9 +711,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Engagement, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Engagement, parse_obj_as(type_=Engagement, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -729,6 +753,8 @@ async def partial_update( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import PatchedEngagementRequest @@ -736,10 +762,16 @@ async def partial_update( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.engagements.partial_update( - id="id", - model=PatchedEngagementRequest(), - ) + + + async def main() -> None: + await client.crm.engagements.partial_update( + id="id", + model=PatchedEngagementRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/engagements/{jsonable_encoder(id)}", @@ -749,9 +781,9 @@ async def partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(EngagementResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(EngagementResponse, parse_obj_as(type_=EngagementResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, 
body=_response.text) @@ -777,22 +809,30 @@ async def meta_patch_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.engagements.meta_patch_retrieve( - id="id", - ) + + + async def main() -> None: + await client.crm.engagements.meta_patch_retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/engagements/meta/patch/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -814,20 +854,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.engagements.meta_post_retrieve() + + + async def main() -> None: + await client.crm.engagements.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/engagements/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -873,13 +921,21 @@ async def remote_field_classes_list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.engagements.remote_field_classes_list() + + + async def main() -> None: + await client.crm.engagements.remote_field_classes_list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/engagements/remote-field-classes", @@ -893,9 +949,9 @@ async def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/field_mapping/client.py b/src/merge/resources/crm/resources/field_mapping/client.py index fe1ded8c..d32cd5fa 100644 --- a/src/merge/resources/crm/resources/field_mapping/client.py +++ b/src/merge/resources/crm/resources/field_mapping/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import 
pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.external_target_field_api_response import ExternalTargetFieldApiResponse from ...types.field_mapping_api_instance_response import FieldMappingApiInstanceResponse @@ -50,9 +50,9 @@ def field_mappings_retrieve( _response = self._client_wrapper.httpx_client.request( "crm/v1/field-mappings", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingApiInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingApiInstanceResponse, parse_obj_as(type_=FieldMappingApiInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -131,9 +131,9 @@ def field_mappings_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -174,9 +174,9 @@ def field_mappings_destroy( method="DELETE", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -238,9 +238,9 @@ def field_mappings_partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -288,9 +288,9 @@ def remote_fields_retrieve( params={"common_models": common_models, "include_example_values": include_example_values}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteFieldApiResponse, parse_obj_as(type_=RemoteFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -325,9 +325,9 @@ def target_fields_retrieve( _response = self._client_wrapper.httpx_client.request( "crm/v1/target-fields", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExternalTargetFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= 
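[Editor's note, not part of the patch] The import swap above (pydantic_v1 replaced by parse_obj_as from merge.core.pydantic_utilities) implies the core module now exposes a keyword-only parse_obj_as helper. Its real implementation is not shown in this patch; the sketch below is only a plausible version-agnostic shim with the same call shape, dispatching to pydantic.TypeAdapter on pydantic v2 and pydantic.parse_obj_as on v1.

import typing

import pydantic

IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")


def parse_obj_as(*, type_: typing.Any, object_: typing.Any) -> typing.Any:
    # Keyword-only call shape matching the sites in this patch. The loose return
    # type here is one reason the call sites wrap the result in typing.cast(...).
    if IS_PYDANTIC_V2:
        return pydantic.TypeAdapter(type_).validate_python(object_)
    return pydantic.parse_obj_as(type_, object_)


class Demo(pydantic.BaseModel):  # hypothetical model for the usage line below
    id: str


parsed = typing.cast(Demo, parse_obj_as(type_=Demo, object_={"id": "abc"}))
print(parsed.id)  # abc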
_response.status_code < 300: + return typing.cast(ExternalTargetFieldApiResponse, parse_obj_as(type_=ExternalTargetFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -356,20 +356,28 @@ async def field_mappings_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.field_mapping.field_mappings_retrieve() + + + async def main() -> None: + await client.crm.field_mapping.field_mappings_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/field-mappings", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingApiInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingApiInstanceResponse, parse_obj_as(type_=FieldMappingApiInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -419,20 +427,28 @@ async def field_mappings_create( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.field_mapping.field_mappings_create( - target_field_name="example_target_field_name", - target_field_description="this is a example description of the target field", - remote_field_traversal_path=["example_remote_field"], - remote_method="GET", - remote_url_path="/example-url-path", - common_model_name="ExampleCommonModel", - ) + + + async def main() -> None: + await client.crm.field_mapping.field_mappings_create( + target_field_name="example_target_field_name", + target_field_description="this is a example description of the target field", + remote_field_traversal_path=["example_remote_field"], + remote_method="GET", + remote_url_path="/example-url-path", + common_model_name="ExampleCommonModel", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/field-mappings", @@ -448,9 +464,9 @@ async def field_mappings_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -476,24 +492,32 @@ async def field_mappings_destroy( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.field_mapping.field_mappings_destroy( - field_mapping_id="field_mapping_id", - ) + + + async def main() -> None: + await client.crm.field_mapping.field_mappings_destroy( + field_mapping_id="field_mapping_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/field-mappings/{jsonable_encoder(field_mapping_id)}", method="DELETE", request_options=request_options, 
) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -534,15 +558,23 @@ async def field_mappings_partial_update( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.field_mapping.field_mappings_partial_update( - field_mapping_id="field_mapping_id", - ) + + + async def main() -> None: + await client.crm.field_mapping.field_mappings_partial_update( + field_mapping_id="field_mapping_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/field-mappings/{jsonable_encoder(field_mapping_id)}", @@ -555,9 +587,9 @@ async def field_mappings_partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -591,13 +623,21 @@ async def remote_fields_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.field_mapping.remote_fields_retrieve() + + + async def main() -> None: + await client.crm.field_mapping.remote_fields_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/remote-fields", @@ -605,9 +645,9 @@ async def remote_fields_retrieve( params={"common_models": common_models, "include_example_values": include_example_values}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteFieldApiResponse, parse_obj_as(type_=RemoteFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -631,20 +671,28 @@ async def target_fields_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.field_mapping.target_fields_retrieve() + + + async def main() -> None: + await client.crm.field_mapping.target_fields_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/target-fields", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExternalTargetFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ExternalTargetFieldApiResponse, parse_obj_as(type_=ExternalTargetFieldApiResponse, 
object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/force_resync/client.py b/src/merge/resources/crm/resources/force_resync/client.py index 5b966529..18dcb9f5 100644 --- a/src/merge/resources/crm/resources/force_resync/client.py +++ b/src/merge/resources/crm/resources/force_resync/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.sync_status import SyncStatus @@ -43,9 +43,9 @@ def sync_status_resync_create( _response = self._client_wrapper.httpx_client.request( "crm/v1/sync-status/resync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[SyncStatus], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[SyncStatus], parse_obj_as(type_=typing.List[SyncStatus], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -74,20 +74,28 @@ async def sync_status_resync_create( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.force_resync.sync_status_resync_create() + + + async def main() -> None: + await client.crm.force_resync.sync_status_resync_create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/sync-status/resync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[SyncStatus], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[SyncStatus], parse_obj_as(type_=typing.List[SyncStatus], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/generate_key/client.py b/src/merge/resources/crm/resources/generate_key/client.py index 48c5a4c3..1172ece1 100644 --- a/src/merge/resources/crm/resources/generate_key/client.py +++ b/src/merge/resources/crm/resources/generate_key/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.remote_key import RemoteKey @@ -49,9 +49,9 @@ def create(self, *, name: str, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "crm/v1/generate-key", method="POST", json={"name": name}, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore 
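
The hunks above repeat one response-handling change in every client method: the status-code check moves inside the try block, and pydantic_v1.parse_obj_as(Model, _response.json()) becomes typing.cast(Model, parse_obj_as(type_=Model, object_=_response.json())) imported from the core pydantic utilities. Below is a minimal standalone sketch of that pattern, not part of the patch itself: `_parse_response` is a hypothetical helper name, the absolute imports are assumed equivalents of the relative `.....core` imports shown in the hunks, and the trailing raise for non-2xx JSON bodies is assumed from the usual tail of these methods, which lies outside the changed hunks.

import typing
from json.decoder import JSONDecodeError

import httpx

from merge.core.api_error import ApiError
from merge.core.pydantic_utilities import parse_obj_as

T = typing.TypeVar("T")


def _parse_response(_response: httpx.Response, model: typing.Type[T]) -> T:
    # Sketch of the regenerated response-handling pattern; `_parse_response`
    # is an illustrative helper name, not something this patch introduces.
    try:
        if 200 <= _response.status_code < 300:
            # New style: keyword form parse_obj_as(type_=..., object_=...)
            # wrapped in typing.cast, replacing pydantic_v1.parse_obj_as(...).
            return typing.cast(T, parse_obj_as(type_=model, object_=_response.json()))
        # The 2xx branch now sits inside the try block, so a successful status
        # whose body fails JSON decoding raises ApiError instead of an
        # unhandled JSONDecodeError.
        _response_json = _response.json()
    except JSONDecodeError:
        raise ApiError(status_code=_response.status_code, body=_response.text)
    # Assumed unchanged tail (outside the hunks above): a non-2xx status with
    # a JSON body is surfaced as ApiError carrying the decoded payload.
    raise ApiError(status_code=_response.status_code, body=_response_json)
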
_response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -81,22 +81,30 @@ async def create(self, *, name: str, request_options: typing.Optional[RequestOpt Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.generate_key.create( - name="Remote Deployment Key 1", - ) + + + async def main() -> None: + await client.crm.generate_key.create( + name="Remote Deployment Key 1", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/generate-key", method="POST", json={"name": name}, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/issues/client.py b/src/merge/resources/crm/resources/issues/client.py index c61cfdea..5f4d2110 100644 --- a/src/merge/resources/crm/resources/issues/client.py +++ b/src/merge/resources/crm/resources/issues/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.issue import Issue from ...types.paginated_issue_list import PaginatedIssueList @@ -127,9 +127,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedIssueList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedIssueList, parse_obj_as(type_=PaginatedIssueList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -166,9 +166,9 @@ def retrieve(self, id: str, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( f"crm/v1/issues/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Issue, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Issue, parse_obj_as(type_=Issue, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -251,13 +251,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.issues.list() + + + async def main() -> None: + await client.crm.issues.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/issues", @@ -287,9 +295,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(PaginatedIssueList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedIssueList, parse_obj_as(type_=PaginatedIssueList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -313,22 +321,30 @@ async def retrieve(self, id: str, *, request_options: typing.Optional[RequestOpt Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.issues.retrieve( - id="id", - ) + + + async def main() -> None: + await client.crm.issues.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/issues/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Issue, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Issue, parse_obj_as(type_=Issue, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/leads/client.py b/src/merge/resources/crm/resources/leads/client.py index dda95d9d..6c541f98 100644 --- a/src/merge/resources/crm/resources/leads/client.py +++ b/src/merge/resources/crm/resources/leads/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.lead import Lead from ...types.lead_request import LeadRequest @@ -142,9 +142,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedLeadList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedLeadList, parse_obj_as(type_=PaginatedLeadList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -200,9 +200,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LeadResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(LeadResponse, parse_obj_as(type_=LeadResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -263,9 +263,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Lead, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Lead, parse_obj_as(type_=Lead, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -298,9 +298,9 @@ def meta_post_retrieve(self, *, request_options: 
typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "crm/v1/leads/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -366,9 +366,9 @@ def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -463,13 +463,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.leads.list() + + + async def main() -> None: + await client.crm.leads.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/leads", @@ -494,9 +502,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedLeadList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedLeadList, parse_obj_as(type_=PaginatedLeadList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -533,6 +541,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import LeadRequest @@ -540,9 +550,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.leads.create( - model=LeadRequest(), - ) + + + async def main() -> None: + await client.crm.leads.create( + model=LeadRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/leads", @@ -552,9 +568,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LeadResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(LeadResponse, parse_obj_as(type_=LeadResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -595,15 +611,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.leads.retrieve( - id="id", - ) + + + async def main() -> None: + await client.crm.leads.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/leads/{jsonable_encoder(id)}", @@ -615,9 +639,9 @@ async def retrieve( }, request_options=request_options, ) - 
if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Lead, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Lead, parse_obj_as(type_=Lead, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -639,20 +663,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.leads.meta_post_retrieve() + + + async def main() -> None: + await client.crm.leads.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/leads/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -698,13 +730,21 @@ async def remote_field_classes_list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.leads.remote_field_classes_list() + + + async def main() -> None: + await client.crm.leads.remote_field_classes_list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/leads/remote-field-classes", @@ -718,9 +758,9 @@ async def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/link_token/client.py b/src/merge/resources/crm/resources/link_token/client.py index 10e51e5a..9610c07a 100644 --- a/src/merge/resources/crm/resources/link_token/client.py +++ b/src/merge/resources/crm/resources/link_token/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.categories_enum import CategoriesEnum from ...types.common_model_scopes_body_request import CommonModelScopesBodyRequest @@ -119,9 +119,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LinkToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(LinkToken, parse_obj_as(type_=LinkToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) 
@@ -198,6 +198,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import CategoriesEnum @@ -205,12 +207,18 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.link_token.create( - end_user_email_address="example@gmail.com", - end_user_organization_name="Test Organization", - end_user_origin_id="12345", - categories=[CategoriesEnum.HRIS, CategoriesEnum.ATS], - ) + + + async def main() -> None: + await client.crm.link_token.create( + end_user_email_address="example@gmail.com", + end_user_organization_name="Test Organization", + end_user_origin_id="12345", + categories=[CategoriesEnum.HRIS, CategoriesEnum.ATS], + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/link-token", @@ -231,9 +239,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LinkToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(LinkToken, parse_obj_as(type_=LinkToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/linked_accounts/client.py b/src/merge/resources/crm/resources/linked_accounts/client.py index 3291acb6..44109781 100644 --- a/src/merge/resources/crm/resources/linked_accounts/client.py +++ b/src/merge/resources/crm/resources/linked_accounts/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_account_details_and_actions_list import PaginatedAccountDetailsAndActionsList from .types.linked_accounts_list_request_category import LinkedAccountsListRequestCategory @@ -122,9 +122,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountDetailsAndActionsList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountDetailsAndActionsList, parse_obj_as(type_=PaginatedAccountDetailsAndActionsList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -214,13 +214,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.linked_accounts.list() + + + async def main() -> None: + await client.crm.linked_accounts.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/linked-accounts", @@ -242,9 +250,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountDetailsAndActionsList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountDetailsAndActionsList, parse_obj_as(type_=PaginatedAccountDetailsAndActionsList, object_=_response.json())) # type: ignore _response_json = 
_response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/notes/client.py b/src/merge/resources/crm/resources/notes/client.py index 71b21b87..89043ee8 100644 --- a/src/merge/resources/crm/resources/notes/client.py +++ b/src/merge/resources/crm/resources/notes/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.meta_response import MetaResponse from ...types.note import Note @@ -137,9 +137,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedNoteList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedNoteList, parse_obj_as(type_=PaginatedNoteList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -195,9 +195,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(NoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(NoteResponse, parse_obj_as(type_=NoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -258,9 +258,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Note, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Note, parse_obj_as(type_=Note, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -293,9 +293,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "crm/v1/notes/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -361,9 +361,9 @@ def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -454,13 +454,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = 
AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.notes.list() + + + async def main() -> None: + await client.crm.notes.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/notes", @@ -484,9 +492,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedNoteList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedNoteList, parse_obj_as(type_=PaginatedNoteList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -523,6 +531,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import NoteRequest @@ -530,9 +540,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.notes.create( - model=NoteRequest(), - ) + + + async def main() -> None: + await client.crm.notes.create( + model=NoteRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/notes", @@ -542,9 +558,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(NoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(NoteResponse, parse_obj_as(type_=NoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -585,15 +601,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.notes.retrieve( - id="id", - ) + + + async def main() -> None: + await client.crm.notes.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/notes/{jsonable_encoder(id)}", @@ -605,9 +629,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Note, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Note, parse_obj_as(type_=Note, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -629,20 +653,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.notes.meta_post_retrieve() + + + async def main() -> None: + await client.crm.notes.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/notes/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = 
_response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -688,13 +720,21 @@ async def remote_field_classes_list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.notes.remote_field_classes_list() + + + async def main() -> None: + await client.crm.notes.remote_field_classes_list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/notes/remote-field-classes", @@ -708,9 +748,9 @@ async def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/opportunities/client.py b/src/merge/resources/crm/resources/opportunities/client.py index 9854d020..5bdc4a37 100644 --- a/src/merge/resources/crm/resources/opportunities/client.py +++ b/src/merge/resources/crm/resources/opportunities/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.meta_response import MetaResponse from ...types.opportunity import Opportunity @@ -153,9 +153,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedOpportunityList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedOpportunityList, parse_obj_as(type_=PaginatedOpportunityList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -211,9 +211,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(OpportunityResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(OpportunityResponse, parse_obj_as(type_=OpportunityResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -284,9 +284,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Opportunity, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Opportunity, parse_obj_as(type_=Opportunity, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -346,9 +346,9 @@ def partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(OpportunityResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(OpportunityResponse, parse_obj_as(type_=OpportunityResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -385,9 +385,9 @@ def meta_patch_retrieve(self, id: str, *, request_options: typing.Optional[Reque _response = self._client_wrapper.httpx_client.request( f"crm/v1/opportunities/meta/patch/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -420,9 +420,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "crm/v1/opportunities/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -488,9 +488,9 @@ def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -593,13 +593,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.opportunities.list() + + + async def main() -> None: + await client.crm.opportunities.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/opportunities", @@ -625,9 +633,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedOpportunityList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedOpportunityList, parse_obj_as(type_=PaginatedOpportunityList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -664,6 +672,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import OpportunityRequest @@ -671,9 +681,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.opportunities.create( - model=OpportunityRequest(), - ) + + + async def main() -> None: + 
await client.crm.opportunities.create( + model=OpportunityRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/opportunities", @@ -683,9 +699,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(OpportunityResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(OpportunityResponse, parse_obj_as(type_=OpportunityResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -734,15 +750,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.opportunities.retrieve( - id="id", - ) + + + async def main() -> None: + await client.crm.opportunities.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/opportunities/{jsonable_encoder(id)}", @@ -756,9 +780,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Opportunity, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Opportunity, parse_obj_as(type_=Opportunity, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -798,6 +822,8 @@ async def partial_update( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import PatchedOpportunityRequest @@ -805,10 +831,16 @@ async def partial_update( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.opportunities.partial_update( - id="id", - model=PatchedOpportunityRequest(), - ) + + + async def main() -> None: + await client.crm.opportunities.partial_update( + id="id", + model=PatchedOpportunityRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/opportunities/{jsonable_encoder(id)}", @@ -818,9 +850,9 @@ async def partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(OpportunityResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(OpportunityResponse, parse_obj_as(type_=OpportunityResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -846,22 +878,30 @@ async def meta_patch_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.opportunities.meta_patch_retrieve( - id="id", - ) + + + async def main() -> None: + await client.crm.opportunities.meta_patch_retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/opportunities/meta/patch/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + 
if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -883,20 +923,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.opportunities.meta_post_retrieve() + + + async def main() -> None: + await client.crm.opportunities.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/opportunities/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -942,13 +990,21 @@ async def remote_field_classes_list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.opportunities.remote_field_classes_list() + + + async def main() -> None: + await client.crm.opportunities.remote_field_classes_list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/opportunities/remote-field-classes", @@ -962,9 +1018,9 @@ async def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/passthrough/client.py b/src/merge/resources/crm/resources/passthrough/client.py index 68530e9f..b18771e0 100644 --- a/src/merge/resources/crm/resources/passthrough/client.py +++ b/src/merge/resources/crm/resources/passthrough/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.data_passthrough_request import DataPassthroughRequest from ...types.remote_response import RemoteResponse @@ -55,9 +55,9 @@ def create( _response = self._client_wrapper.httpx_client.request( "crm/v1/passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise 
ApiError(status_code=_response.status_code, body=_response.text) @@ -88,6 +88,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import DataPassthroughRequest, MethodEnum @@ -95,19 +97,25 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.passthrough.create( - request=DataPassthroughRequest( - method=MethodEnum.GET, - path="/scooters", - ), - ) + + + async def main() -> None: + await client.crm.passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/regenerate_key/client.py b/src/merge/resources/crm/resources/regenerate_key/client.py index 8434bb94..f6b34365 100644 --- a/src/merge/resources/crm/resources/regenerate_key/client.py +++ b/src/merge/resources/crm/resources/regenerate_key/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.remote_key import RemoteKey @@ -49,9 +49,9 @@ def create(self, *, name: str, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "crm/v1/regenerate-key", method="POST", json={"name": name}, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -81,22 +81,30 @@ async def create(self, *, name: str, request_options: typing.Optional[RequestOpt Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.regenerate_key.create( - name="Remote Deployment Key 1", - ) + + + async def main() -> None: + await client.crm.regenerate_key.create( + name="Remote Deployment Key 1", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/regenerate-key", method="POST", json={"name": name}, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, 
body=_response.text) diff --git a/src/merge/resources/crm/resources/scopes/client.py b/src/merge/resources/crm/resources/scopes/client.py index 54e00986..afb6cdb6 100644 --- a/src/merge/resources/crm/resources/scopes/client.py +++ b/src/merge/resources/crm/resources/scopes/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.common_model_scope_api import CommonModelScopeApi from ...types.individual_common_model_scope_deserializer_request import IndividualCommonModelScopeDeserializerRequest @@ -47,9 +47,9 @@ def default_scopes_retrieve( _response = self._client_wrapper.httpx_client.request( "crm/v1/default-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -84,9 +84,9 @@ def linked_account_scopes_retrieve( _response = self._client_wrapper.httpx_client.request( "crm/v1/linked-account-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -157,9 +157,9 @@ def linked_account_scopes_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -188,20 +188,28 @@ async def default_scopes_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.scopes.default_scopes_retrieve() + + + async def main() -> None: + await client.crm.scopes.default_scopes_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/default-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -225,20 +233,28 @@ async def linked_account_scopes_retrieve( Examples -------- + import asyncio + from 
merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.scopes.linked_account_scopes_retrieve() + + + async def main() -> None: + await client.crm.scopes.linked_account_scopes_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/linked-account-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -268,6 +284,8 @@ async def linked_account_scopes_create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import ( IndividualCommonModelScopeDeserializerRequest, @@ -278,29 +296,35 @@ async def linked_account_scopes_create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.scopes.linked_account_scopes_create( - common_models=[ - IndividualCommonModelScopeDeserializerRequest( - model_name="Employee", - model_permissions={ - "READ": ModelPermissionDeserializerRequest( - is_enabled=True, - ), - "WRITE": ModelPermissionDeserializerRequest( - is_enabled=False, - ), - }, - ), - IndividualCommonModelScopeDeserializerRequest( - model_name="Benefit", - model_permissions={ - "WRITE": ModelPermissionDeserializerRequest( - is_enabled=False, - ) - }, - ), - ], - ) + + + async def main() -> None: + await client.crm.scopes.linked_account_scopes_create( + common_models=[ + IndividualCommonModelScopeDeserializerRequest( + model_name="Employee", + model_permissions={ + "READ": ModelPermissionDeserializerRequest( + is_enabled=True, + ), + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ), + }, + ), + IndividualCommonModelScopeDeserializerRequest( + model_name="Benefit", + model_permissions={ + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ) + }, + ), + ], + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/linked-account-scopes", @@ -309,9 +333,9 @@ async def linked_account_scopes_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/stages/client.py b/src/merge/resources/crm/resources/stages/client.py index 0c427288..287f67f4 100644 --- a/src/merge/resources/crm/resources/stages/client.py +++ b/src/merge/resources/crm/resources/stages/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from 
...types.paginated_remote_field_class_list import PaginatedRemoteFieldClassList from ...types.paginated_stage_list import PaginatedStageList @@ -104,9 +104,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedStageList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedStageList, parse_obj_as(type_=PaginatedStageList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -159,9 +159,9 @@ def retrieve( params={"include_remote_data": include_remote_data, "include_remote_fields": include_remote_fields}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Stage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Stage, parse_obj_as(type_=Stage, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -227,9 +227,9 @@ def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -300,13 +300,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.stages.list() + + + async def main() -> None: + await client.crm.stages.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/stages", @@ -325,9 +333,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedStageList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedStageList, parse_obj_as(type_=PaginatedStageList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -364,15 +372,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.stages.retrieve( - id="id", - ) + + + async def main() -> None: + await client.crm.stages.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/stages/{jsonable_encoder(id)}", @@ -380,9 +396,9 @@ async def retrieve( params={"include_remote_data": include_remote_data, "include_remote_fields": include_remote_fields}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Stage, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Stage, parse_obj_as(type_=Stage, object_=_response.json())) # type: ignore _response_json = 
_response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -428,13 +444,21 @@ async def remote_field_classes_list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.stages.remote_field_classes_list() + + + async def main() -> None: + await client.crm.stages.remote_field_classes_list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/stages/remote-field-classes", @@ -448,9 +472,9 @@ async def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/sync_status/client.py b/src/merge/resources/crm/resources/sync_status/client.py index 9dd4c789..824461ac 100644 --- a/src/merge/resources/crm/resources/sync_status/client.py +++ b/src/merge/resources/crm/resources/sync_status/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_sync_status_list import PaginatedSyncStatusList @@ -56,9 +56,9 @@ def list( params={"cursor": cursor, "page_size": page_size}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedSyncStatusList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedSyncStatusList, parse_obj_as(type_=PaginatedSyncStatusList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -97,13 +97,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.sync_status.list() + + + async def main() -> None: + await client.crm.sync_status.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/sync-status", @@ -111,9 +119,9 @@ async def list( params={"cursor": cursor, "page_size": page_size}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedSyncStatusList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedSyncStatusList, parse_obj_as(type_=PaginatedSyncStatusList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/tasks/client.py b/src/merge/resources/crm/resources/tasks/client.py index c6ae647d..e5705618 100644 --- a/src/merge/resources/crm/resources/tasks/client.py +++ 
b/src/merge/resources/crm/resources/tasks/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.meta_response import MetaResponse from ...types.paginated_remote_field_class_list import PaginatedRemoteFieldClassList @@ -118,9 +118,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTaskList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTaskList, parse_obj_as(type_=PaginatedTaskList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -176,9 +176,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TaskResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TaskResponse, parse_obj_as(type_=TaskResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -239,9 +239,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Task, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Task, parse_obj_as(type_=Task, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -301,9 +301,9 @@ def partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TaskResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TaskResponse, parse_obj_as(type_=TaskResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -340,9 +340,9 @@ def meta_patch_retrieve(self, id: str, *, request_options: typing.Optional[Reque _response = self._client_wrapper.httpx_client.request( f"crm/v1/tasks/meta/patch/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -375,9 +375,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "crm/v1/tasks/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return 
typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -443,9 +443,9 @@ def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -520,13 +520,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.tasks.list() + + + async def main() -> None: + await client.crm.tasks.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/tasks", @@ -546,9 +554,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTaskList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTaskList, parse_obj_as(type_=PaginatedTaskList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -585,6 +593,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import TaskRequest @@ -592,9 +602,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.tasks.create( - model=TaskRequest(), - ) + + + async def main() -> None: + await client.crm.tasks.create( + model=TaskRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/tasks", @@ -604,9 +620,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TaskResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TaskResponse, parse_obj_as(type_=TaskResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -647,15 +663,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.tasks.retrieve( - id="id", - ) + + + async def main() -> None: + await client.crm.tasks.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/tasks/{jsonable_encoder(id)}", @@ -667,9 +691,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Task, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Task, parse_obj_as(type_=Task, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise 
ApiError(status_code=_response.status_code, body=_response.text) @@ -709,6 +733,8 @@ async def partial_update( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import PatchedTaskRequest @@ -716,10 +742,16 @@ async def partial_update( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.tasks.partial_update( - id="id", - model=PatchedTaskRequest(), - ) + + + async def main() -> None: + await client.crm.tasks.partial_update( + id="id", + model=PatchedTaskRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/tasks/{jsonable_encoder(id)}", @@ -729,9 +761,9 @@ async def partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TaskResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TaskResponse, parse_obj_as(type_=TaskResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -757,22 +789,30 @@ async def meta_patch_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.tasks.meta_patch_retrieve( - id="id", - ) + + + async def main() -> None: + await client.crm.tasks.meta_patch_retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/tasks/meta/patch/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -794,20 +834,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.tasks.meta_post_retrieve() + + + async def main() -> None: + await client.crm.tasks.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/tasks/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -853,13 +901,21 @@ async def remote_field_classes_list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.tasks.remote_field_classes_list() + + + async def main() -> None: + await client.crm.tasks.remote_field_classes_list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( 
"crm/v1/tasks/remote-field-classes", @@ -873,9 +929,9 @@ async def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/users/client.py b/src/merge/resources/crm/resources/users/client.py index a54f2177..5a51453b 100644 --- a/src/merge/resources/crm/resources/users/client.py +++ b/src/merge/resources/crm/resources/users/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.ignore_common_model_request import IgnoreCommonModelRequest from ...types.paginated_remote_field_class_list import PaginatedRemoteFieldClassList @@ -108,9 +108,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedUserList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedUserList, parse_obj_as(type_=PaginatedUserList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -163,9 +163,9 @@ def retrieve( params={"include_remote_data": include_remote_data, "include_remote_fields": include_remote_fields}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(User, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(User, parse_obj_as(type_=User, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -217,9 +217,9 @@ def ignore_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -285,9 +285,9 @@ def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -358,13 +358,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.users.list() + + + async def main() -> None: + await 
client.crm.users.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/users", @@ -383,9 +391,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedUserList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedUserList, parse_obj_as(type_=PaginatedUserList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -422,15 +430,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.users.retrieve( - id="id", - ) + + + async def main() -> None: + await client.crm.users.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/users/{jsonable_encoder(id)}", @@ -438,9 +454,9 @@ async def retrieve( params={"include_remote_data": include_remote_data, "include_remote_fields": include_remote_fields}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(User, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(User, parse_obj_as(type_=User, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -471,6 +487,8 @@ async def ignore_create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.crm import IgnoreCommonModelRequest, ReasonEnum @@ -478,12 +496,18 @@ async def ignore_create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.users.ignore_create( - model_id="model_id", - request=IgnoreCommonModelRequest( - reason=ReasonEnum.GENERAL_CUSTOMER_REQUEST, - ), - ) + + + async def main() -> None: + await client.crm.users.ignore_create( + model_id="model_id", + request=IgnoreCommonModelRequest( + reason=ReasonEnum.GENERAL_CUSTOMER_REQUEST, + ), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"crm/v1/users/ignore/{jsonable_encoder(model_id)}", @@ -492,9 +516,9 @@ async def ignore_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -540,13 +564,21 @@ async def remote_field_classes_list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.users.remote_field_classes_list() + + + async def main() -> None: + await client.crm.users.remote_field_classes_list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/users/remote-field-classes", @@ -560,9 +592,9 @@ async def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return 
typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/resources/webhook_receivers/client.py b/src/merge/resources/crm/resources/webhook_receivers/client.py index 0ff5f9cf..edb981f1 100644 --- a/src/merge/resources/crm/resources/webhook_receivers/client.py +++ b/src/merge/resources/crm/resources/webhook_receivers/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.webhook_receiver import WebhookReceiver @@ -44,9 +44,9 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty _response = self._client_wrapper.httpx_client.request( "crm/v1/webhook-receivers", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[WebhookReceiver], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[WebhookReceiver], parse_obj_as(type_=typing.List[WebhookReceiver], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -99,9 +99,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(WebhookReceiver, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(WebhookReceiver, parse_obj_as(type_=WebhookReceiver, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -128,20 +128,28 @@ async def list(self, *, request_options: typing.Optional[RequestOptions] = None) Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.webhook_receivers.list() + + + async def main() -> None: + await client.crm.webhook_receivers.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "crm/v1/webhook-receivers", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[WebhookReceiver], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[WebhookReceiver], parse_obj_as(type_=typing.List[WebhookReceiver], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -176,16 +184,24 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.crm.webhook_receivers.create( - event="event", - is_active=True, - ) + + + async def main() -> None: + await client.crm.webhook_receivers.create( + event="event", + is_active=True, + ) + + + asyncio.run(main()) """ _response = await 
self._client_wrapper.httpx_client.request( "crm/v1/webhook-receivers", @@ -194,9 +210,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(WebhookReceiver, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(WebhookReceiver, parse_obj_as(type_=WebhookReceiver, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/crm/types/account.py b/src/merge/resources/crm/types/account.py index 1595f9ac..01afa8da 100644 --- a/src/merge/resources/crm/types/account.py +++ b/src/merge/resources/crm/types/account.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_owner import AccountOwner from .address import Address from .phone_number import PhoneNumber @@ -12,7 +13,7 @@ from .remote_field import RemoteField -class Account(pydantic_v1.BaseModel): +class Account(UniversalBaseModel): """ # The Account Object @@ -26,64 +27,64 @@ class Account(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - owner: typing.Optional[AccountOwner] = pydantic_v1.Field() + owner: typing.Optional[AccountOwner] = pydantic.Field() """ The account's owner. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The account's name. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The account's description. """ - industry: typing.Optional[str] = pydantic_v1.Field() + industry: typing.Optional[str] = pydantic.Field() """ The account's industry. """ - website: typing.Optional[str] = pydantic_v1.Field() + website: typing.Optional[str] = pydantic.Field() """ The account's website. """ - number_of_employees: typing.Optional[int] = pydantic_v1.Field() + number_of_employees: typing.Optional[int] = pydantic.Field() """ The account's number of employees. """ addresses: typing.Optional[typing.List[Address]] phone_numbers: typing.Optional[typing.List[PhoneNumber]] - last_activity_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + last_activity_at: typing.Optional[dt.datetime] = pydantic.Field() """ The last date (either most recent or furthest in the future) of when an activity occurs in an account. """ - remote_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_updated_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the CRM system account data was last modified by a user with a login. 
""" - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's account was created. """ @@ -93,20 +94,11 @@ class Account(pydantic_v1.BaseModel): remote_data: typing.Optional[typing.List[RemoteData]] remote_fields: typing.Optional[typing.List[RemoteField]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/account_details.py b/src/merge/resources/crm/types/account_details.py index 81f9587b..58cd348a 100644 --- a/src/merge/resources/crm/types/account_details.py +++ b/src/merge/resources/crm/types/account_details.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .category_enum import CategoryEnum -class AccountDetails(pydantic_v1.BaseModel): +class AccountDetails(UniversalBaseModel): id: typing.Optional[str] integration: typing.Optional[str] integration_slug: typing.Optional[str] @@ -18,27 +18,18 @@ class AccountDetails(pydantic_v1.BaseModel): end_user_email_address: typing.Optional[str] status: typing.Optional[str] webhook_listener_url: typing.Optional[str] - is_duplicate: typing.Optional[bool] = pydantic_v1.Field() + is_duplicate: typing.Optional[bool] = pydantic.Field() """ Whether a Production Linked Account's credentials match another existing Production Linked Account. This field is `null` for Test Linked Accounts, incomplete Production Linked Accounts, and ignored duplicate Production Linked Account sets. 
""" account_type: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/account_details_and_actions.py b/src/merge/resources/crm/types/account_details_and_actions.py index 6a1b2874..ffc38f58 100644 --- a/src/merge/resources/crm/types/account_details_and_actions.py +++ b/src/merge/resources/crm/types/account_details_and_actions.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_details_and_actions_integration import AccountDetailsAndActionsIntegration from .account_details_and_actions_status_enum import AccountDetailsAndActionsStatusEnum from .category_enum import CategoryEnum -class AccountDetailsAndActions(pydantic_v1.BaseModel): +class AccountDetailsAndActions(UniversalBaseModel): """ # The LinkedAccount Object @@ -30,13 +30,13 @@ class AccountDetailsAndActions(pydantic_v1.BaseModel): end_user_origin_id: typing.Optional[str] end_user_organization_name: str end_user_email_address: str - subdomain: typing.Optional[str] = pydantic_v1.Field() + subdomain: typing.Optional[str] = pydantic.Field() """ The tenant or domain the customer has provided access to. """ webhook_listener_url: str - is_duplicate: typing.Optional[bool] = pydantic_v1.Field() + is_duplicate: typing.Optional[bool] = pydantic.Field() """ Whether a Production Linked Account's credentials match another existing Production Linked Account. This field is `null` for Test Linked Accounts, incomplete Production Linked Accounts, and ignored duplicate Production Linked Account sets. 
""" @@ -44,20 +44,11 @@ class AccountDetailsAndActions(pydantic_v1.BaseModel): integration: typing.Optional[AccountDetailsAndActionsIntegration] account_type: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/account_details_and_actions_integration.py b/src/merge/resources/crm/types/account_details_and_actions_integration.py index 7c300a2b..8aa63726 100644 --- a/src/merge/resources/crm/types/account_details_and_actions_integration.py +++ b/src/merge/resources/crm/types/account_details_and_actions_integration.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .categories_enum import CategoriesEnum from .model_operation import ModelOperation -class AccountDetailsAndActionsIntegration(pydantic_v1.BaseModel): +class AccountDetailsAndActionsIntegration(UniversalBaseModel): name: str categories: typing.List[CategoriesEnum] image: typing.Optional[str] @@ -19,20 +19,11 @@ class AccountDetailsAndActionsIntegration(pydantic_v1.BaseModel): passthrough_available: bool available_model_operations: typing.Optional[typing.List[ModelOperation]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/account_integration.py b/src/merge/resources/crm/types/account_integration.py index 57006e35..7a8a27ab 100644 --- a/src/merge/resources/crm/types/account_integration.py +++ 
b/src/merge/resources/crm/types/account_integration.py @@ -1,69 +1,60 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .categories_enum import CategoriesEnum -class AccountIntegration(pydantic_v1.BaseModel): - name: str = pydantic_v1.Field() +class AccountIntegration(UniversalBaseModel): + name: str = pydantic.Field() """ Company name. """ - categories: typing.Optional[typing.List[CategoriesEnum]] = pydantic_v1.Field() + categories: typing.Optional[typing.List[CategoriesEnum]] = pydantic.Field() """ Category or categories this integration belongs to. Multiple categories should be comma separated, i.e. [ats, hris]. """ - image: typing.Optional[str] = pydantic_v1.Field() + image: typing.Optional[str] = pydantic.Field() """ Company logo in rectangular shape. Upload an image with a clear background. """ - square_image: typing.Optional[str] = pydantic_v1.Field() + square_image: typing.Optional[str] = pydantic.Field() """ Company logo in square shape. Upload an image with a white background. """ - color: typing.Optional[str] = pydantic_v1.Field() + color: typing.Optional[str] = pydantic.Field() """ The color of this integration used for buttons and text throughout the app and landing pages. Choose a darker, saturated color. """ slug: typing.Optional[str] - api_endpoints_to_documentation_urls: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + api_endpoints_to_documentation_urls: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ Mapping of API endpoints to documentation urls for support. Example: {'GET': [['/common-model-scopes', 'https://docs.merge.dev/accounting/common-model-scopes/#common_model_scopes_retrieve'],['/common-model-actions', 'https://docs.merge.dev/accounting/common-model-actions/#common_model_actions_retrieve']], 'POST': []} """ - webhook_setup_guide_url: typing.Optional[str] = pydantic_v1.Field() + webhook_setup_guide_url: typing.Optional[str] = pydantic.Field() """ Setup guide URL for third party webhook creation. Exposed in Merge Docs. """ - category_beta_status: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + category_beta_status: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ Category or categories this integration is in beta status for. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/account_request.py b/src/merge/resources/crm/types/account_request.py index fa550ba2..c37c200c 100644 --- a/src/merge/resources/crm/types/account_request.py +++ b/src/merge/resources/crm/types/account_request.py @@ -3,14 +3,15 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_request_owner import AccountRequestOwner from .address_request import AddressRequest from .remote_field_request import RemoteFieldRequest -class AccountRequest(pydantic_v1.BaseModel): +class AccountRequest(UniversalBaseModel): """ # The Account Object @@ -23,38 +24,38 @@ class AccountRequest(pydantic_v1.BaseModel): TODO """ - owner: typing.Optional[AccountRequestOwner] = pydantic_v1.Field() + owner: typing.Optional[AccountRequestOwner] = pydantic.Field() """ The account's owner. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The account's name. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The account's description. """ - industry: typing.Optional[str] = pydantic_v1.Field() + industry: typing.Optional[str] = pydantic.Field() """ The account's industry. """ - website: typing.Optional[str] = pydantic_v1.Field() + website: typing.Optional[str] = pydantic.Field() """ The account's website. """ - number_of_employees: typing.Optional[int] = pydantic_v1.Field() + number_of_employees: typing.Optional[int] = pydantic.Field() """ The account's number of employees. """ addresses: typing.Optional[typing.List[AddressRequest]] - last_activity_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + last_activity_at: typing.Optional[dt.datetime] = pydantic.Field() """ The last date (either most recent or furthest in the future) of when an activity occurs in an account. 
""" @@ -63,20 +64,11 @@ class AccountRequest(pydantic_v1.BaseModel): linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] remote_fields: typing.Optional[typing.List[RemoteFieldRequest]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/account_token.py b/src/merge/resources/crm/types/account_token.py index 4794fea3..c280c7cb 100644 --- a/src/merge/resources/crm/types/account_token.py +++ b/src/merge/resources/crm/types/account_token.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_integration import AccountIntegration -class AccountToken(pydantic_v1.BaseModel): +class AccountToken(UniversalBaseModel): account_token: str integration: AccountIntegration - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/address.py b/src/merge/resources/crm/types/address.py index 29ae0c0f..6d1ea2ba 100644 --- a/src/merge/resources/crm/types/address.py +++ b/src/merge/resources/crm/types/address.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .address_address_type import AddressAddressType from .address_country 
import AddressCountry -class Address(pydantic_v1.BaseModel): +class Address(UniversalBaseModel): """ # The Address Object @@ -22,42 +23,42 @@ class Address(pydantic_v1.BaseModel): TODO """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - street_1: typing.Optional[str] = pydantic_v1.Field() + street_1: typing.Optional[str] = pydantic.Field() """ Line 1 of the address's street. """ - street_2: typing.Optional[str] = pydantic_v1.Field() + street_2: typing.Optional[str] = pydantic.Field() """ Line 2 of the address's street. """ - city: typing.Optional[str] = pydantic_v1.Field() + city: typing.Optional[str] = pydantic.Field() """ The address's city. """ - state: typing.Optional[str] = pydantic_v1.Field() + state: typing.Optional[str] = pydantic.Field() """ The address's state. """ - postal_code: typing.Optional[str] = pydantic_v1.Field() + postal_code: typing.Optional[str] = pydantic.Field() """ The address's postal code. """ - country: typing.Optional[AddressCountry] = pydantic_v1.Field() + country: typing.Optional[AddressCountry] = pydantic.Field() """ The address's country. @@ -312,7 +313,7 @@ class Address(pydantic_v1.BaseModel): - `ZW` - Zimbabwe """ - address_type: typing.Optional[AddressAddressType] = pydantic_v1.Field() + address_type: typing.Optional[AddressAddressType] = pydantic.Field() """ The address type. @@ -320,20 +321,11 @@ class Address(pydantic_v1.BaseModel): - `SHIPPING` - SHIPPING """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/address_request.py b/src/merge/resources/crm/types/address_request.py index 2fb17841..4aba6cd6 100644 --- a/src/merge/resources/crm/types/address_request.py +++ b/src/merge/resources/crm/types/address_request.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. 
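# Illustrative sketch, not part of the generated patch: every model hunk in this
# regeneration drops the hand-rolled json()/dict() overrides and json_encoders in favour
# of a single version-gated config, as in the Account, AccountDetails, and Address types
# above. The snippet below isolates that pattern. How merge.core.pydantic_utilities
# derives IS_PYDANTIC_V2, and what UniversalBaseModel adds on top of pydantic.BaseModel,
# are not visible in this diff; the VERSION check and plain BaseModel here are stand-ins.
import typing

import pydantic

IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2")  # stand-in for the SDK's own flag


class ExampleModel(pydantic.BaseModel):  # the generated models derive from UniversalBaseModel
    remote_id: typing.Optional[str] = pydantic.Field(
        default=None, description="The third-party API ID of the matching object."
    )

    if IS_PYDANTIC_V2:
        # Pydantic v2 reads configuration from model_config.
        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(
            extra="allow", frozen=True
        )  # type: ignore  # Pydantic v2
    else:
        # Pydantic v1 expresses the same behaviour through an inner Config class.
        class Config:
            frozen = True
            smart_union = True
            extra = pydantic.Extra.allow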
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .address_request_address_type import AddressRequestAddressType from .address_request_country import AddressRequestCountry -class AddressRequest(pydantic_v1.BaseModel): +class AddressRequest(UniversalBaseModel): """ # The Address Object @@ -22,32 +22,32 @@ class AddressRequest(pydantic_v1.BaseModel): TODO """ - street_1: typing.Optional[str] = pydantic_v1.Field() + street_1: typing.Optional[str] = pydantic.Field() """ Line 1 of the address's street. """ - street_2: typing.Optional[str] = pydantic_v1.Field() + street_2: typing.Optional[str] = pydantic.Field() """ Line 2 of the address's street. """ - city: typing.Optional[str] = pydantic_v1.Field() + city: typing.Optional[str] = pydantic.Field() """ The address's city. """ - state: typing.Optional[str] = pydantic_v1.Field() + state: typing.Optional[str] = pydantic.Field() """ The address's state. """ - postal_code: typing.Optional[str] = pydantic_v1.Field() + postal_code: typing.Optional[str] = pydantic.Field() """ The address's postal code. """ - country: typing.Optional[AddressRequestCountry] = pydantic_v1.Field() + country: typing.Optional[AddressRequestCountry] = pydantic.Field() """ The address's country. @@ -302,7 +302,7 @@ class AddressRequest(pydantic_v1.BaseModel): - `ZW` - Zimbabwe """ - address_type: typing.Optional[AddressRequestAddressType] = pydantic_v1.Field() + address_type: typing.Optional[AddressRequestAddressType] = pydantic.Field() """ The address type. @@ -313,20 +313,11 @@ class AddressRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/advanced_metadata.py b/src/merge/resources/crm/types/advanced_metadata.py index 2aa7d1d6..5b0ec9e2 100644 --- a/src/merge/resources/crm/types/advanced_metadata.py +++ b/src/merge/resources/crm/types/advanced_metadata.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
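# Illustrative sketch, not part of the generated patch: the client hunks earlier in this
# diff repeat one response-handling change. The 2xx check and the deserialization now
# both sit inside the try block, and parsing goes through
# typing.cast(..., parse_obj_as(type_=..., object_=...)) instead of
# pydantic_v1.parse_obj_as(...), so a success response with an undecodable body raises
# ApiError rather than a bare JSONDecodeError. The helper below restates that shape once;
# handle_response and its trailing raise are illustrative framing, not SDK symbols (the
# visible hunks end at the except clause).
import typing
from json.decoder import JSONDecodeError

import httpx

from merge.core.api_error import ApiError
from merge.core.pydantic_utilities import parse_obj_as

T = typing.TypeVar("T")


def handle_response(_response: httpx.Response, type_: typing.Type[T]) -> T:
    try:
        if 200 <= _response.status_code < 300:
            # Mirrors the regenerated clients: cast the parsed body to the expected model.
            return typing.cast(T, parse_obj_as(type_=type_, object_=_response.json()))
        _response_json = _response.json()
    except JSONDecodeError:
        raise ApiError(status_code=_response.status_code, body=_response.text)
    # Assumed tail for a non-2xx response whose body did decode as JSON.
    raise ApiError(status_code=_response.status_code, body=_response_json)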
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class AdvancedMetadata(pydantic_v1.BaseModel): + +class AdvancedMetadata(UniversalBaseModel): id: str display_name: typing.Optional[str] description: typing.Optional[str] @@ -15,20 +15,11 @@ class AdvancedMetadata(pydantic_v1.BaseModel): is_custom: typing.Optional[bool] field_choices: typing.Optional[typing.List[typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/association.py b/src/merge/resources/crm/types/association.py index 28fd3ad1..f19ed1bb 100644 --- a/src/merge/resources/crm/types/association.py +++ b/src/merge/resources/crm/types/association.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .association_association_type import AssociationAssociationType -class Association(pydantic_v1.BaseModel): +class Association(UniversalBaseModel): """ # The Association Object @@ -21,37 +22,28 @@ class Association(pydantic_v1.BaseModel): TODO """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ source_object: typing.Optional[typing.Dict[str, typing.Any]] target_object: typing.Optional[typing.Dict[str, typing.Any]] - association_type: typing.Optional[AssociationAssociationType] = pydantic_v1.Field() + association_type: typing.Optional[AssociationAssociationType] = pydantic.Field() """ The association type the association belongs to. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/association_sub_type.py b/src/merge/resources/crm/types/association_sub_type.py index db3977cc..cce1d748 100644 --- a/src/merge/resources/crm/types/association_sub_type.py +++ b/src/merge/resources/crm/types/association_sub_type.py @@ -3,38 +3,30 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class AssociationSubType(pydantic_v1.BaseModel): + +class AssociationSubType(UniversalBaseModel): id: typing.Optional[str] - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. 
""" origin_type: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/association_type.py b/src/merge/resources/crm/types/association_type.py index 4cce102e..b714b5e7 100644 --- a/src/merge/resources/crm/types/association_type.py +++ b/src/merge/resources/crm/types/association_type.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .association_sub_type import AssociationSubType from .association_type_cardinality import AssociationTypeCardinality -class AssociationType(pydantic_v1.BaseModel): +class AssociationType(UniversalBaseModel): """ # The AssociationType Object @@ -23,22 +24,22 @@ class AssociationType(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - source_object_class: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + source_object_class: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ The class of the source object (Custom Object or Common Model) for the association type. 
""" @@ -49,20 +50,11 @@ class AssociationType(pydantic_v1.BaseModel): cardinality: typing.Optional[AssociationTypeCardinality] is_required: typing.Optional[bool] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/association_type_request_request.py b/src/merge/resources/crm/types/association_type_request_request.py index 389f6eb8..df043d07 100644 --- a/src/merge/resources/crm/types/association_type_request_request.py +++ b/src/merge/resources/crm/types/association_type_request_request.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .cardinality_enum import CardinalityEnum from .object_class_description_request import ObjectClassDescriptionRequest -class AssociationTypeRequestRequest(pydantic_v1.BaseModel): +class AssociationTypeRequestRequest(UniversalBaseModel): source_object_class: ObjectClassDescriptionRequest target_object_classes: typing.List[ObjectClassDescriptionRequest] remote_key_name: str @@ -17,20 +17,11 @@ class AssociationTypeRequestRequest(pydantic_v1.BaseModel): cardinality: typing.Optional[CardinalityEnum] is_required: typing.Optional[bool] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/async_passthrough_reciept.py b/src/merge/resources/crm/types/async_passthrough_reciept.py index 2cc33210..f2144443 100644 --- 
a/src/merge/resources/crm/types/async_passthrough_reciept.py +++ b/src/merge/resources/crm/types/async_passthrough_reciept.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class AsyncPassthroughReciept(pydantic_v1.BaseModel): - async_passthrough_receipt_id: str - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class AsyncPassthroughReciept(UniversalBaseModel): + async_passthrough_receipt_id: str - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/audit_log_event.py b/src/merge/resources/crm/types/audit_log_event.py index 43be24a5..c0b4e43a 100644 --- a/src/merge/resources/crm/types/audit_log_event.py +++ b/src/merge/resources/crm/types/audit_log_event.py @@ -3,25 +3,26 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .audit_log_event_event_type import AuditLogEventEventType from .audit_log_event_role import AuditLogEventRole -class AuditLogEvent(pydantic_v1.BaseModel): +class AuditLogEvent(UniversalBaseModel): id: typing.Optional[str] - user_name: typing.Optional[str] = pydantic_v1.Field() + user_name: typing.Optional[str] = pydantic.Field() """ The User's full name at the time of this Event occurring. """ - user_email: typing.Optional[str] = pydantic_v1.Field() + user_email: typing.Optional[str] = pydantic.Field() """ The User's email at the time of this Event occurring. """ - role: AuditLogEventRole = pydantic_v1.Field() + role: AuditLogEventRole = pydantic.Field() """ Designates the role of the user (or SYSTEM/API if action not taken by a user) at the time of this Event occurring. @@ -34,7 +35,7 @@ class AuditLogEvent(pydantic_v1.BaseModel): """ ip_address: str - event_type: AuditLogEventEventType = pydantic_v1.Field() + event_type: AuditLogEventEventType = pydantic.Field() """ Designates the type of event that occurred. 
@@ -80,20 +81,11 @@ class AuditLogEvent(pydantic_v1.BaseModel): event_description: str created_at: typing.Optional[dt.datetime] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/available_actions.py b/src/merge/resources/crm/types/available_actions.py index bbd94581..1f1d424c 100644 --- a/src/merge/resources/crm/types/available_actions.py +++ b/src/merge/resources/crm/types/available_actions.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_integration import AccountIntegration from .model_operation import ModelOperation -class AvailableActions(pydantic_v1.BaseModel): +class AvailableActions(UniversalBaseModel): """ # The AvailableActions Object @@ -26,20 +26,11 @@ class AvailableActions(pydantic_v1.BaseModel): passthrough_available: bool available_model_operations: typing.Optional[typing.List[ModelOperation]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/common_model_scope_api.py b/src/merge/resources/crm/types/common_model_scope_api.py index d6fdec56..093001ba 100644 --- a/src/merge/resources/crm/types/common_model_scope_api.py +++ b/src/merge/resources/crm/types/common_model_scope_api.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .individual_common_model_scope_deserializer import IndividualCommonModelScopeDeserializer -class CommonModelScopeApi(pydantic_v1.BaseModel): - common_models: typing.List[IndividualCommonModelScopeDeserializer] = pydantic_v1.Field() +class CommonModelScopeApi(UniversalBaseModel): + common_models: typing.List[IndividualCommonModelScopeDeserializer] = pydantic.Field() """ The common models you want to update the scopes for """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/common_model_scopes_body_request.py b/src/merge/resources/crm/types/common_model_scopes_body_request.py index e956cbdc..9098f031 100644 --- a/src/merge/resources/crm/types/common_model_scopes_body_request.py +++ b/src/merge/resources/crm/types/common_model_scopes_body_request.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .enabled_actions_enum import EnabledActionsEnum -class CommonModelScopesBodyRequest(pydantic_v1.BaseModel): +class CommonModelScopesBodyRequest(UniversalBaseModel): model_id: str enabled_actions: typing.List[EnabledActionsEnum] disabled_fields: typing.List[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/contact.py b/src/merge/resources/crm/types/contact.py index c1916f05..9ba91025 100644 --- a/src/merge/resources/crm/types/contact.py +++ b/src/merge/resources/crm/types/contact.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .address import Address from .contact_account import ContactAccount from .contact_owner import ContactOwner @@ -14,7 +15,7 @@ from .remote_field import RemoteField -class Contact(pydantic_v1.BaseModel): +class Contact(UniversalBaseModel): """ # The Contact Object @@ -28,37 +29,37 @@ class Contact(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - first_name: typing.Optional[str] = pydantic_v1.Field() + first_name: typing.Optional[str] = pydantic.Field() """ The contact's first name. """ - last_name: typing.Optional[str] = pydantic_v1.Field() + last_name: typing.Optional[str] = pydantic.Field() """ The contact's last name. """ - account: typing.Optional[ContactAccount] = pydantic_v1.Field() + account: typing.Optional[ContactAccount] = pydantic.Field() """ The contact's account. """ - owner: typing.Optional[ContactOwner] = pydantic_v1.Field() + owner: typing.Optional[ContactOwner] = pydantic.Field() """ The contact's owner. 
""" @@ -66,12 +67,12 @@ class Contact(pydantic_v1.BaseModel): addresses: typing.Optional[typing.List[Address]] email_addresses: typing.Optional[typing.List[EmailAddress]] phone_numbers: typing.Optional[typing.List[PhoneNumber]] - last_activity_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + last_activity_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the contact's last activity occurred. """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's contact was created. """ @@ -81,20 +82,11 @@ class Contact(pydantic_v1.BaseModel): remote_data: typing.Optional[typing.List[RemoteData]] remote_fields: typing.Optional[typing.List[RemoteField]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/contact_request.py b/src/merge/resources/crm/types/contact_request.py index ef7b0827..4c64f37c 100644 --- a/src/merge/resources/crm/types/contact_request.py +++ b/src/merge/resources/crm/types/contact_request.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .address_request import AddressRequest from .contact_request_account import ContactRequestAccount from .contact_request_owner import ContactRequestOwner @@ -13,7 +14,7 @@ from .remote_field_request import RemoteFieldRequest -class ContactRequest(pydantic_v1.BaseModel): +class ContactRequest(UniversalBaseModel): """ # The Contact Object @@ -26,22 +27,22 @@ class ContactRequest(pydantic_v1.BaseModel): TODO """ - first_name: typing.Optional[str] = pydantic_v1.Field() + first_name: typing.Optional[str] = pydantic.Field() """ The contact's first name. """ - last_name: typing.Optional[str] = pydantic_v1.Field() + last_name: typing.Optional[str] = pydantic.Field() """ The contact's last name. """ - account: typing.Optional[ContactRequestAccount] = pydantic_v1.Field() + account: typing.Optional[ContactRequestAccount] = pydantic.Field() """ The contact's account. """ - owner: typing.Optional[ContactRequestOwner] = pydantic_v1.Field() + owner: typing.Optional[ContactRequestOwner] = pydantic.Field() """ The contact's owner. 
""" @@ -49,7 +50,7 @@ class ContactRequest(pydantic_v1.BaseModel): addresses: typing.Optional[typing.List[AddressRequest]] email_addresses: typing.Optional[typing.List[EmailAddressRequest]] phone_numbers: typing.Optional[typing.List[PhoneNumberRequest]] - last_activity_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + last_activity_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the contact's last activity occurred. """ @@ -58,20 +59,11 @@ class ContactRequest(pydantic_v1.BaseModel): linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] remote_fields: typing.Optional[typing.List[RemoteFieldRequest]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/crm_account_response.py b/src/merge/resources/crm/types/crm_account_response.py index 13e87b6d..ad73360b 100644 --- a/src/merge/resources/crm/types/crm_account_response.py +++ b/src/merge/resources/crm/types/crm_account_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account import Account from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .warning_validation_problem import WarningValidationProblem -class CrmAccountResponse(pydantic_v1.BaseModel): +class CrmAccountResponse(UniversalBaseModel): model: Account warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/crm_association_type_response.py b/src/merge/resources/crm/types/crm_association_type_response.py index f22834e6..65dcb5d0 100644 --- a/src/merge/resources/crm/types/crm_association_type_response.py +++ b/src/merge/resources/crm/types/crm_association_type_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .association_type import AssociationType from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .warning_validation_problem import WarningValidationProblem -class CrmAssociationTypeResponse(pydantic_v1.BaseModel): +class CrmAssociationTypeResponse(UniversalBaseModel): model: AssociationType warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/crm_contact_response.py b/src/merge/resources/crm/types/crm_contact_response.py index 9efa6e58..244f6cf5 100644 --- a/src/merge/resources/crm/types/crm_contact_response.py +++ b/src/merge/resources/crm/types/crm_contact_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .contact import Contact from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .warning_validation_problem import WarningValidationProblem -class CrmContactResponse(pydantic_v1.BaseModel): +class CrmContactResponse(UniversalBaseModel): model: Contact warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/crm_custom_object_response.py b/src/merge/resources/crm/types/crm_custom_object_response.py index 0d9d8849..634d18d6 100644 --- a/src/merge/resources/crm/types/crm_custom_object_response.py +++ b/src/merge/resources/crm/types/crm_custom_object_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .custom_object import CustomObject from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .warning_validation_problem import WarningValidationProblem -class CrmCustomObjectResponse(pydantic_v1.BaseModel): +class CrmCustomObjectResponse(UniversalBaseModel): model: CustomObject warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/custom_object.py b/src/merge/resources/crm/types/custom_object.py index 19fde14a..6cbf1301 100644 --- a/src/merge/resources/crm/types/custom_object.py +++ b/src/merge/resources/crm/types/custom_object.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_field import RemoteField -class CustomObject(pydantic_v1.BaseModel): +class CustomObject(UniversalBaseModel): """ # The CustomObject Object @@ -22,47 +23,38 @@ class CustomObject(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - object_class: typing.Optional[str] = pydantic_v1.Field() + object_class: typing.Optional[str] = pydantic.Field() """ The custom object class the custom object record belongs to. """ - fields: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + fields: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ The fields and values contained within the custom object record. 
""" remote_fields: typing.Optional[typing.List[RemoteField]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/custom_object_class.py b/src/merge/resources/crm/types/custom_object_class.py index 768eba43..068ff0e1 100644 --- a/src/merge/resources/crm/types/custom_object_class.py +++ b/src/merge/resources/crm/types/custom_object_class.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_field_class_for_custom_object_class import RemoteFieldClassForCustomObjectClass -class CustomObjectClass(pydantic_v1.BaseModel): +class CustomObjectClass(UniversalBaseModel): """ # The Custom Object Class Object @@ -22,48 +23,39 @@ class CustomObjectClass(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ created_at: typing.Optional[dt.datetime] modified_at: typing.Optional[dt.datetime] - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The custom object class's name. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The custom object class's description. """ - labels: typing.Optional[typing.Dict[str, typing.Optional[str]]] = pydantic_v1.Field() + labels: typing.Optional[typing.Dict[str, typing.Optional[str]]] = pydantic.Field() """ The custom object class's singular and plural labels. """ fields: typing.Optional[typing.List[RemoteFieldClassForCustomObjectClass]] - association_types: typing.Optional[typing.List[typing.Dict[str, typing.Any]]] = pydantic_v1.Field() + association_types: typing.Optional[typing.List[typing.Dict[str, typing.Any]]] = pydantic.Field() """ The types of associations with other models that the custom object class can have. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/custom_object_request.py b/src/merge/resources/crm/types/custom_object_request.py index f9fd9510..c7b46413 100644 --- a/src/merge/resources/crm/types/custom_object_request.py +++ b/src/merge/resources/crm/types/custom_object_request.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class CustomObjectRequest(pydantic_v1.BaseModel): - fields: typing.Dict[str, typing.Any] - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class CustomObjectRequest(UniversalBaseModel): + fields: typing.Dict[str, typing.Any] - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/data_passthrough_request.py b/src/merge/resources/crm/types/data_passthrough_request.py index fdf9b18c..3f9ee090 100644 --- a/src/merge/resources/crm/types/data_passthrough_request.py +++ b/src/merge/resources/crm/types/data_passthrough_request.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .method_enum import MethodEnum from .multipart_form_field_request import MultipartFormFieldRequest from .request_format_enum import RequestFormatEnum -class DataPassthroughRequest(pydantic_v1.BaseModel): +class DataPassthroughRequest(UniversalBaseModel): """ # The DataPassthrough Object @@ -24,51 +24,42 @@ class DataPassthroughRequest(pydantic_v1.BaseModel): """ method: MethodEnum - path: str = pydantic_v1.Field() + path: str = pydantic.Field() """ The path of the request in the third party's platform. """ - base_url_override: typing.Optional[str] = pydantic_v1.Field() + base_url_override: typing.Optional[str] = pydantic.Field() """ An optional override of the third party's base url for the request. """ - data: typing.Optional[str] = pydantic_v1.Field() + data: typing.Optional[str] = pydantic.Field() """ The data with the request. You must include a `request_format` parameter matching the data's format """ - multipart_form_data: typing.Optional[typing.List[MultipartFormFieldRequest]] = pydantic_v1.Field() + multipart_form_data: typing.Optional[typing.List[MultipartFormFieldRequest]] = pydantic.Field() """ Pass an array of `MultipartFormField` objects in here instead of using the `data` param if `request_format` is set to `MULTIPART`. """ - headers: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + headers: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ The headers to use for the request (Merge will handle the account's authorization headers). `Content-Type` header is required for passthrough. Choose content type corresponding to expected format of receiving server. """ request_format: typing.Optional[RequestFormatEnum] - normalize_response: typing.Optional[bool] = pydantic_v1.Field() + normalize_response: typing.Optional[bool] = pydantic.Field() """ Optional. If true, the response will always be an object of the form `{"type": T, "value": ...}` where `T` will be one of `string, boolean, number, null, array, object`. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/debug_mode_log.py b/src/merge/resources/crm/types/debug_mode_log.py index 321c9090..8edea2ae 100644 --- a/src/merge/resources/crm/types/debug_mode_log.py +++ b/src/merge/resources/crm/types/debug_mode_log.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_model_log_summary import DebugModelLogSummary -class DebugModeLog(pydantic_v1.BaseModel): +class DebugModeLog(UniversalBaseModel): log_id: str dashboard_view: str log_summary: DebugModelLogSummary - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/debug_model_log_summary.py b/src/merge/resources/crm/types/debug_model_log_summary.py index 06bb154b..4f5b07db 100644 --- a/src/merge/resources/crm/types/debug_model_log_summary.py +++ b/src/merge/resources/crm/types/debug_model_log_summary.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class DebugModelLogSummary(pydantic_v1.BaseModel): + +class DebugModelLogSummary(UniversalBaseModel): url: str method: str status_code: int - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/email_address.py b/src/merge/resources/crm/types/email_address.py index 9e381c42..f29e620c 100644 --- a/src/merge/resources/crm/types/email_address.py +++ b/src/merge/resources/crm/types/email_address.py @@ -3,11 +3,12 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class EmailAddress(pydantic_v1.BaseModel): + +class EmailAddress(UniversalBaseModel): """ # The EmailAddress Object @@ -20,40 +21,31 @@ class EmailAddress(pydantic_v1.BaseModel): Fetch from the `GET Contact` endpoint and view their email addresses. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - email_address: typing.Optional[str] = pydantic_v1.Field() + email_address: typing.Optional[str] = pydantic.Field() """ The email address. """ - email_address_type: typing.Optional[str] = pydantic_v1.Field() + email_address_type: typing.Optional[str] = pydantic.Field() """ The email address's type. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/email_address_request.py b/src/merge/resources/crm/types/email_address_request.py index 9892f3af..12b336bd 100644 --- a/src/merge/resources/crm/types/email_address_request.py +++ b/src/merge/resources/crm/types/email_address_request.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class EmailAddressRequest(pydantic_v1.BaseModel): + +class EmailAddressRequest(UniversalBaseModel): """ # The EmailAddress Object @@ -20,12 +20,12 @@ class EmailAddressRequest(pydantic_v1.BaseModel): Fetch from the `GET Contact` endpoint and view their email addresses. """ - email_address: typing.Optional[str] = pydantic_v1.Field() + email_address: typing.Optional[str] = pydantic.Field() """ The email address. """ - email_address_type: typing.Optional[str] = pydantic_v1.Field() + email_address_type: typing.Optional[str] = pydantic.Field() """ The email address's type. 
""" @@ -33,20 +33,11 @@ class EmailAddressRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/engagement.py b/src/merge/resources/crm/types/engagement.py index 0e2ffade..3c7e5829 100644 --- a/src/merge/resources/crm/types/engagement.py +++ b/src/merge/resources/crm/types/engagement.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .engagement_account import EngagementAccount from .engagement_contacts_item import EngagementContactsItem from .engagement_direction import EngagementDirection @@ -14,7 +15,7 @@ from .remote_field import RemoteField -class Engagement(pydantic_v1.BaseModel): +class Engagement(UniversalBaseModel): """ # The Engagement Object @@ -28,37 +29,37 @@ class Engagement(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - owner: typing.Optional[EngagementOwner] = pydantic_v1.Field() + owner: typing.Optional[EngagementOwner] = pydantic.Field() """ The engagement's owner. """ - content: typing.Optional[str] = pydantic_v1.Field() + content: typing.Optional[str] = pydantic.Field() """ The engagement's content. """ - subject: typing.Optional[str] = pydantic_v1.Field() + subject: typing.Optional[str] = pydantic.Field() """ The engagement's subject. """ - direction: typing.Optional[EngagementDirection] = pydantic_v1.Field() + direction: typing.Optional[EngagementDirection] = pydantic.Field() """ The engagement's direction. 
@@ -66,28 +67,28 @@ class Engagement(pydantic_v1.BaseModel): - `OUTBOUND` - OUTBOUND """ - engagement_type: typing.Optional[EngagementEngagementType] = pydantic_v1.Field() + engagement_type: typing.Optional[EngagementEngagementType] = pydantic.Field() """ The engagement type of the engagement. """ - start_time: typing.Optional[dt.datetime] = pydantic_v1.Field() + start_time: typing.Optional[dt.datetime] = pydantic.Field() """ The time at which the engagement started. """ - end_time: typing.Optional[dt.datetime] = pydantic_v1.Field() + end_time: typing.Optional[dt.datetime] = pydantic.Field() """ The time at which the engagement ended. """ - account: typing.Optional[EngagementAccount] = pydantic_v1.Field() + account: typing.Optional[EngagementAccount] = pydantic.Field() """ The account of the engagement. """ contacts: typing.Optional[typing.List[typing.Optional[EngagementContactsItem]]] - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -96,20 +97,11 @@ class Engagement(pydantic_v1.BaseModel): remote_data: typing.Optional[typing.List[RemoteData]] remote_fields: typing.Optional[typing.List[RemoteField]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/engagement_request.py b/src/merge/resources/crm/types/engagement_request.py index df8418cf..90be13db 100644 --- a/src/merge/resources/crm/types/engagement_request.py +++ b/src/merge/resources/crm/types/engagement_request.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .engagement_request_account import EngagementRequestAccount from .engagement_request_contacts_item import EngagementRequestContactsItem from .engagement_request_direction import EngagementRequestDirection @@ -13,7 +14,7 @@ from .remote_field_request import RemoteFieldRequest -class EngagementRequest(pydantic_v1.BaseModel): +class EngagementRequest(UniversalBaseModel): """ # The Engagement Object @@ -26,22 +27,22 @@ class EngagementRequest(pydantic_v1.BaseModel): TODO """ - owner: typing.Optional[EngagementRequestOwner] = pydantic_v1.Field() + owner: typing.Optional[EngagementRequestOwner] = pydantic.Field() """ The engagement's owner. 
""" - content: typing.Optional[str] = pydantic_v1.Field() + content: typing.Optional[str] = pydantic.Field() """ The engagement's content. """ - subject: typing.Optional[str] = pydantic_v1.Field() + subject: typing.Optional[str] = pydantic.Field() """ The engagement's subject. """ - direction: typing.Optional[EngagementRequestDirection] = pydantic_v1.Field() + direction: typing.Optional[EngagementRequestDirection] = pydantic.Field() """ The engagement's direction. @@ -49,22 +50,22 @@ class EngagementRequest(pydantic_v1.BaseModel): - `OUTBOUND` - OUTBOUND """ - engagement_type: typing.Optional[EngagementRequestEngagementType] = pydantic_v1.Field() + engagement_type: typing.Optional[EngagementRequestEngagementType] = pydantic.Field() """ The engagement type of the engagement. """ - start_time: typing.Optional[dt.datetime] = pydantic_v1.Field() + start_time: typing.Optional[dt.datetime] = pydantic.Field() """ The time at which the engagement started. """ - end_time: typing.Optional[dt.datetime] = pydantic_v1.Field() + end_time: typing.Optional[dt.datetime] = pydantic.Field() """ The time at which the engagement ended. """ - account: typing.Optional[EngagementRequestAccount] = pydantic_v1.Field() + account: typing.Optional[EngagementRequestAccount] = pydantic.Field() """ The account of the engagement. """ @@ -74,20 +75,11 @@ class EngagementRequest(pydantic_v1.BaseModel): linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] remote_fields: typing.Optional[typing.List[RemoteFieldRequest]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/engagement_response.py b/src/merge/resources/crm/types/engagement_response.py index e316f2df..86daf473 100644 --- a/src/merge/resources/crm/types/engagement_response.py +++ b/src/merge/resources/crm/types/engagement_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .engagement import Engagement from .error_validation_problem import ErrorValidationProblem from .warning_validation_problem import WarningValidationProblem -class EngagementResponse(pydantic_v1.BaseModel): +class EngagementResponse(UniversalBaseModel): model: Engagement warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/engagement_type.py b/src/merge/resources/crm/types/engagement_type.py index fae7975a..53ea4311 100644 --- a/src/merge/resources/crm/types/engagement_type.py +++ b/src/merge/resources/crm/types/engagement_type.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .engagement_type_activity_type import EngagementTypeActivityType from .remote_field import RemoteField -class EngagementType(pydantic_v1.BaseModel): +class EngagementType(UniversalBaseModel): """ # The Engagement Type Object @@ -23,22 +24,22 @@ class EngagementType(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - activity_type: typing.Optional[EngagementTypeActivityType] = pydantic_v1.Field() + activity_type: typing.Optional[EngagementTypeActivityType] = pydantic.Field() """ The engagement type's activity type. @@ -47,27 +48,18 @@ class EngagementType(pydantic_v1.BaseModel): - `EMAIL` - EMAIL """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The engagement type's name. 
""" remote_fields: typing.Optional[typing.List[RemoteField]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/error_validation_problem.py b/src/merge/resources/crm/types/error_validation_problem.py index 425af45c..3838491d 100644 --- a/src/merge/resources/crm/types/error_validation_problem.py +++ b/src/merge/resources/crm/types/error_validation_problem.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .validation_problem_source import ValidationProblemSource -class ErrorValidationProblem(pydantic_v1.BaseModel): +class ErrorValidationProblem(UniversalBaseModel): source: typing.Optional[ValidationProblemSource] title: str detail: str problem_type: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/external_target_field_api.py b/src/merge/resources/crm/types/external_target_field_api.py index a97d536a..8a971c64 100644 --- a/src/merge/resources/crm/types/external_target_field_api.py +++ b/src/merge/resources/crm/types/external_target_field_api.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ExternalTargetFieldApi(pydantic_v1.BaseModel): + +class ExternalTargetFieldApi(UniversalBaseModel): name: typing.Optional[str] description: typing.Optional[str] is_mapped: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/external_target_field_api_response.py b/src/merge/resources/crm/types/external_target_field_api_response.py index ff73a940..1aac3b41 100644 --- a/src/merge/resources/crm/types/external_target_field_api_response.py +++ b/src/merge/resources/crm/types/external_target_field_api_response.py @@ -1,40 +1,29 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .external_target_field_api import ExternalTargetFieldApi - - -class ExternalTargetFieldApiResponse(pydantic_v1.BaseModel): - account: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Account") - contact: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Contact") - lead: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Lead") - note: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Note") - opportunity: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Opportunity") - stage: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Stage") - user: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="User") - task: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Task") - engagement: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Engagement") +import pydantic - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .external_target_field_api import ExternalTargetFieldApi - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} +class ExternalTargetFieldApiResponse(UniversalBaseModel): + account: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Account") + contact: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Contact") + lead: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Lead") + note: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Note") + opportunity: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Opportunity") + stage: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Stage") + user: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="User") + task: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Task") + engagement: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Engagement") + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/field_mapping_api_instance.py b/src/merge/resources/crm/types/field_mapping_api_instance.py index d9d7670d..8af85a52 100644 --- 
a/src/merge/resources/crm/types/field_mapping_api_instance.py +++ b/src/merge/resources/crm/types/field_mapping_api_instance.py @@ -1,34 +1,25 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_mapping_api_instance_remote_field import FieldMappingApiInstanceRemoteField from .field_mapping_api_instance_target_field import FieldMappingApiInstanceTargetField -class FieldMappingApiInstance(pydantic_v1.BaseModel): +class FieldMappingApiInstance(UniversalBaseModel): id: typing.Optional[str] is_integration_wide: typing.Optional[bool] target_field: typing.Optional[FieldMappingApiInstanceTargetField] remote_field: typing.Optional[FieldMappingApiInstanceRemoteField] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/field_mapping_api_instance_remote_field.py b/src/merge/resources/crm/types/field_mapping_api_instance_remote_field.py index 9539d3dc..0635b76f 100644 --- a/src/merge/resources/crm/types/field_mapping_api_instance_remote_field.py +++ b/src/merge/resources/crm/types/field_mapping_api_instance_remote_field.py @@ -1,36 +1,25 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_mapping_api_instance_remote_field_remote_endpoint_info import ( FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo, ) -class FieldMappingApiInstanceRemoteField(pydantic_v1.BaseModel): +class FieldMappingApiInstanceRemoteField(UniversalBaseModel): remote_key_name: str - schema_: typing.Dict[str, typing.Any] = pydantic_v1.Field(alias="schema") + schema_: typing.Dict[str, typing.Any] = pydantic.Field(alias="schema") remote_endpoint_info: FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py b/src/merge/resources/crm/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py index d9fcc276..e34eb6e4 100644 --- a/src/merge/resources/crm/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py +++ b/src/merge/resources/crm/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo(pydantic_v1.BaseModel): + +class FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo(UniversalBaseModel): method: typing.Optional[str] url_path: typing.Optional[str] field_traversal_path: typing.Optional[typing.List[str]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/field_mapping_api_instance_response.py b/src/merge/resources/crm/types/field_mapping_api_instance_response.py index 66614cfa..d688164f 100644 --- a/src/merge/resources/crm/types/field_mapping_api_instance_response.py +++ b/src/merge/resources/crm/types/field_mapping_api_instance_response.py @@ -1,40 +1,29 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .field_mapping_api_instance import FieldMappingApiInstance - - -class FieldMappingApiInstanceResponse(pydantic_v1.BaseModel): - account: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Account") - contact: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Contact") - lead: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Lead") - note: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Note") - opportunity: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Opportunity") - stage: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Stage") - user: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="User") - task: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Task") - engagement: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Engagement") +import pydantic - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .field_mapping_api_instance import FieldMappingApiInstance - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} +class FieldMappingApiInstanceResponse(UniversalBaseModel): + account: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Account") + contact: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Contact") + lead: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Lead") + note: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Note") + opportunity: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Opportunity") + stage: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Stage") + user: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="User") + task: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Task") + engagement: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Engagement") + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/field_mapping_api_instance_target_field.py b/src/merge/resources/crm/types/field_mapping_api_instance_target_field.py index 25a8dcff..c590d4ce 100644 
--- a/src/merge/resources/crm/types/field_mapping_api_instance_target_field.py +++ b/src/merge/resources/crm/types/field_mapping_api_instance_target_field.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldMappingApiInstanceTargetField(pydantic_v1.BaseModel): + +class FieldMappingApiInstanceTargetField(UniversalBaseModel): name: str description: str is_organization_wide: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/field_mapping_instance_response.py b/src/merge/resources/crm/types/field_mapping_instance_response.py index b55d2c40..aaf06f0e 100644 --- a/src/merge/resources/crm/types/field_mapping_instance_response.py +++ b/src/merge/resources/crm/types/field_mapping_instance_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .field_mapping_api_instance import FieldMappingApiInstance from .warning_validation_problem import WarningValidationProblem -class FieldMappingInstanceResponse(pydantic_v1.BaseModel): +class FieldMappingInstanceResponse(UniversalBaseModel): model: FieldMappingApiInstance warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/field_permission_deserializer.py b/src/merge/resources/crm/types/field_permission_deserializer.py index 124f3deb..ed80b9d6 100644 --- a/src/merge/resources/crm/types/field_permission_deserializer.py +++ b/src/merge/resources/crm/types/field_permission_deserializer.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldPermissionDeserializer(pydantic_v1.BaseModel): + +class FieldPermissionDeserializer(UniversalBaseModel): enabled: typing.Optional[typing.List[typing.Any]] disabled: typing.Optional[typing.List[typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/field_permission_deserializer_request.py b/src/merge/resources/crm/types/field_permission_deserializer_request.py index 65e80e75..e937e743 100644 --- a/src/merge/resources/crm/types/field_permission_deserializer_request.py +++ b/src/merge/resources/crm/types/field_permission_deserializer_request.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldPermissionDeserializerRequest(pydantic_v1.BaseModel): + +class FieldPermissionDeserializerRequest(UniversalBaseModel): enabled: typing.Optional[typing.List[typing.Any]] disabled: typing.Optional[typing.List[typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/ignore_common_model_request.py b/src/merge/resources/crm/types/ignore_common_model_request.py index b3c84ced..50eebec8 100644 --- a/src/merge/resources/crm/types/ignore_common_model_request.py +++ b/src/merge/resources/crm/types/ignore_common_model_request.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .reason_enum import ReasonEnum -class IgnoreCommonModelRequest(pydantic_v1.BaseModel): +class IgnoreCommonModelRequest(UniversalBaseModel): reason: ReasonEnum message: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/individual_common_model_scope_deserializer.py b/src/merge/resources/crm/types/individual_common_model_scope_deserializer.py index d80ca06e..ffa55055 100644 --- a/src/merge/resources/crm/types/individual_common_model_scope_deserializer.py +++ b/src/merge/resources/crm/types/individual_common_model_scope_deserializer.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_permission_deserializer import FieldPermissionDeserializer from .model_permission_deserializer import ModelPermissionDeserializer -class IndividualCommonModelScopeDeserializer(pydantic_v1.BaseModel): +class IndividualCommonModelScopeDeserializer(UniversalBaseModel): model_name: str model_permissions: typing.Optional[typing.Dict[str, ModelPermissionDeserializer]] field_permissions: typing.Optional[FieldPermissionDeserializer] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/individual_common_model_scope_deserializer_request.py b/src/merge/resources/crm/types/individual_common_model_scope_deserializer_request.py index 8f2e7de5..d0e68f6d 100644 --- a/src/merge/resources/crm/types/individual_common_model_scope_deserializer_request.py +++ b/src/merge/resources/crm/types/individual_common_model_scope_deserializer_request.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_permission_deserializer_request import FieldPermissionDeserializerRequest from .model_permission_deserializer_request import ModelPermissionDeserializerRequest -class IndividualCommonModelScopeDeserializerRequest(pydantic_v1.BaseModel): +class IndividualCommonModelScopeDeserializerRequest(UniversalBaseModel): model_name: str model_permissions: typing.Optional[typing.Dict[str, ModelPermissionDeserializerRequest]] field_permissions: typing.Optional[FieldPermissionDeserializerRequest] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/issue.py b/src/merge/resources/crm/types/issue.py index 086a0db2..28366731 100644 --- a/src/merge/resources/crm/types/issue.py +++ b/src/merge/resources/crm/types/issue.py @@ -3,14 +3,15 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .issue_status import IssueStatus -class Issue(pydantic_v1.BaseModel): +class Issue(UniversalBaseModel): id: typing.Optional[str] - status: typing.Optional[IssueStatus] = pydantic_v1.Field() + status: typing.Optional[IssueStatus] = pydantic.Field() """ Status of the issue. 
Options: ('ONGOING', 'RESOLVED') @@ -25,20 +26,11 @@ class Issue(pydantic_v1.BaseModel): is_muted: typing.Optional[bool] error_details: typing.Optional[typing.List[str]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/item_schema.py b/src/merge/resources/crm/types/item_schema.py index 45059fd0..65f321ef 100644 --- a/src/merge/resources/crm/types/item_schema.py +++ b/src/merge/resources/crm/types/item_schema.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .item_format_enum import ItemFormatEnum from .item_type_enum import ItemTypeEnum -class ItemSchema(pydantic_v1.BaseModel): +class ItemSchema(UniversalBaseModel): item_type: typing.Optional[ItemTypeEnum] item_format: typing.Optional[ItemFormatEnum] item_choices: typing.Optional[typing.List[str]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/lead.py b/src/merge/resources/crm/types/lead.py index b1faad46..b9bb2352 100644 --- a/src/merge/resources/crm/types/lead.py +++ b/src/merge/resources/crm/types/lead.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .address import 
Address from .email_address import EmailAddress from .lead_converted_account import LeadConvertedAccount @@ -15,7 +16,7 @@ from .remote_field import RemoteField -class Lead(pydantic_v1.BaseModel): +class Lead(UniversalBaseModel): """ # The Lead Object @@ -29,47 +30,47 @@ class Lead(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - owner: typing.Optional[LeadOwner] = pydantic_v1.Field() + owner: typing.Optional[LeadOwner] = pydantic.Field() """ The lead's owner. """ - lead_source: typing.Optional[str] = pydantic_v1.Field() + lead_source: typing.Optional[str] = pydantic.Field() """ The lead's source. """ - title: typing.Optional[str] = pydantic_v1.Field() + title: typing.Optional[str] = pydantic.Field() """ The lead's title. """ - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The lead's company. """ - first_name: typing.Optional[str] = pydantic_v1.Field() + first_name: typing.Optional[str] = pydantic.Field() """ The lead's first name. """ - last_name: typing.Optional[str] = pydantic_v1.Field() + last_name: typing.Optional[str] = pydantic.Field() """ The lead's last name. """ @@ -77,27 +78,27 @@ class Lead(pydantic_v1.BaseModel): addresses: typing.Optional[typing.List[Address]] email_addresses: typing.Optional[typing.List[EmailAddress]] phone_numbers: typing.Optional[typing.List[PhoneNumber]] - remote_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_updated_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's lead was updated. """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's lead was created. """ - converted_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + converted_date: typing.Optional[dt.datetime] = pydantic.Field() """ When the lead was converted. """ - converted_contact: typing.Optional[LeadConvertedContact] = pydantic_v1.Field() + converted_contact: typing.Optional[LeadConvertedContact] = pydantic.Field() """ The contact of the converted lead. """ - converted_account: typing.Optional[LeadConvertedAccount] = pydantic_v1.Field() + converted_account: typing.Optional[LeadConvertedAccount] = pydantic.Field() """ The account of the converted lead. 
""" @@ -107,20 +108,11 @@ class Lead(pydantic_v1.BaseModel): remote_data: typing.Optional[typing.List[RemoteData]] remote_fields: typing.Optional[typing.List[RemoteField]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/lead_request.py b/src/merge/resources/crm/types/lead_request.py index 4f3ef7df..c15161b2 100644 --- a/src/merge/resources/crm/types/lead_request.py +++ b/src/merge/resources/crm/types/lead_request.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .address_request import AddressRequest from .email_address_request import EmailAddressRequest from .lead_request_converted_account import LeadRequestConvertedAccount @@ -14,7 +15,7 @@ from .remote_field_request import RemoteFieldRequest -class LeadRequest(pydantic_v1.BaseModel): +class LeadRequest(UniversalBaseModel): """ # The Lead Object @@ -27,32 +28,32 @@ class LeadRequest(pydantic_v1.BaseModel): TODO """ - owner: typing.Optional[LeadRequestOwner] = pydantic_v1.Field() + owner: typing.Optional[LeadRequestOwner] = pydantic.Field() """ The lead's owner. """ - lead_source: typing.Optional[str] = pydantic_v1.Field() + lead_source: typing.Optional[str] = pydantic.Field() """ The lead's source. """ - title: typing.Optional[str] = pydantic_v1.Field() + title: typing.Optional[str] = pydantic.Field() """ The lead's title. """ - company: typing.Optional[str] = pydantic_v1.Field() + company: typing.Optional[str] = pydantic.Field() """ The lead's company. """ - first_name: typing.Optional[str] = pydantic_v1.Field() + first_name: typing.Optional[str] = pydantic.Field() """ The lead's first name. """ - last_name: typing.Optional[str] = pydantic_v1.Field() + last_name: typing.Optional[str] = pydantic.Field() """ The lead's last name. """ @@ -60,17 +61,17 @@ class LeadRequest(pydantic_v1.BaseModel): addresses: typing.Optional[typing.List[AddressRequest]] email_addresses: typing.Optional[typing.List[EmailAddressRequest]] phone_numbers: typing.Optional[typing.List[PhoneNumberRequest]] - converted_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + converted_date: typing.Optional[dt.datetime] = pydantic.Field() """ When the lead was converted. 
""" - converted_contact: typing.Optional[LeadRequestConvertedContact] = pydantic_v1.Field() + converted_contact: typing.Optional[LeadRequestConvertedContact] = pydantic.Field() """ The contact of the converted lead. """ - converted_account: typing.Optional[LeadRequestConvertedAccount] = pydantic_v1.Field() + converted_account: typing.Optional[LeadRequestConvertedAccount] = pydantic.Field() """ The account of the converted lead. """ @@ -79,20 +80,11 @@ class LeadRequest(pydantic_v1.BaseModel): linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] remote_fields: typing.Optional[typing.List[RemoteFieldRequest]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/lead_response.py b/src/merge/resources/crm/types/lead_response.py index 2f4ed2bc..f2a7bd2f 100644 --- a/src/merge/resources/crm/types/lead_response.py +++ b/src/merge/resources/crm/types/lead_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .lead import Lead from .warning_validation_problem import WarningValidationProblem -class LeadResponse(pydantic_v1.BaseModel): +class LeadResponse(UniversalBaseModel): model: Lead warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/link_token.py b/src/merge/resources/crm/types/link_token.py index 1c82d1ac..87c88faf 100644 --- a/src/merge/resources/crm/types/link_token.py +++ b/src/merge/resources/crm/types/link_token.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class LinkToken(pydantic_v1.BaseModel): + +class LinkToken(UniversalBaseModel): link_token: str integration_name: typing.Optional[str] magic_link_url: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/linked_account_status.py b/src/merge/resources/crm/types/linked_account_status.py index 60e21a98..34184012 100644 --- a/src/merge/resources/crm/types/linked_account_status.py +++ b/src/merge/resources/crm/types/linked_account_status.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class LinkedAccountStatus(pydantic_v1.BaseModel): + +class LinkedAccountStatus(UniversalBaseModel): linked_account_status: str can_make_request: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/meta_response.py b/src/merge/resources/crm/types/meta_response.py index debaf4ef..27e02126 100644 --- a/src/merge/resources/crm/types/meta_response.py +++ b/src/merge/resources/crm/types/meta_response.py @@ -1,34 +1,25 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .linked_account_status import LinkedAccountStatus -class MetaResponse(pydantic_v1.BaseModel): +class MetaResponse(UniversalBaseModel): request_schema: typing.Dict[str, typing.Any] remote_field_classes: typing.Optional[typing.Dict[str, typing.Any]] status: typing.Optional[LinkedAccountStatus] has_conditional_params: bool has_required_linked_account_params: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/model_operation.py b/src/merge/resources/crm/types/model_operation.py index 0f4429ec..efe8355e 100644 --- a/src/merge/resources/crm/types/model_operation.py +++ b/src/merge/resources/crm/types/model_operation.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ModelOperation(pydantic_v1.BaseModel): + +class ModelOperation(UniversalBaseModel): """ # The ModelOperation Object @@ -25,20 +25,11 @@ class ModelOperation(pydantic_v1.BaseModel): required_post_parameters: typing.List[str] supported_fields: typing.List[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/model_permission_deserializer.py b/src/merge/resources/crm/types/model_permission_deserializer.py index 5a6adf20..14bc4f99 100644 --- a/src/merge/resources/crm/types/model_permission_deserializer.py +++ b/src/merge/resources/crm/types/model_permission_deserializer.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ModelPermissionDeserializer(pydantic_v1.BaseModel): - is_enabled: typing.Optional[bool] - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class ModelPermissionDeserializer(UniversalBaseModel): + is_enabled: typing.Optional[bool] - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/model_permission_deserializer_request.py b/src/merge/resources/crm/types/model_permission_deserializer_request.py index 3f72b9ac..cc2e7f77 100644 --- a/src/merge/resources/crm/types/model_permission_deserializer_request.py +++ b/src/merge/resources/crm/types/model_permission_deserializer_request.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ModelPermissionDeserializerRequest(pydantic_v1.BaseModel): - is_enabled: typing.Optional[bool] - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class ModelPermissionDeserializerRequest(UniversalBaseModel): + is_enabled: typing.Optional[bool] - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/multipart_form_field_request.py b/src/merge/resources/crm/types/multipart_form_field_request.py index 9c8ffb21..b6a6c708 100644 --- a/src/merge/resources/crm/types/multipart_form_field_request.py +++ b/src/merge/resources/crm/types/multipart_form_field_request.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .multipart_form_field_request_encoding import MultipartFormFieldRequestEncoding -class MultipartFormFieldRequest(pydantic_v1.BaseModel): +class MultipartFormFieldRequest(UniversalBaseModel): """ # The MultipartFormField Object @@ -21,17 +21,17 @@ class MultipartFormFieldRequest(pydantic_v1.BaseModel): Create a `MultipartFormField` to define a multipart form entry. """ - name: str = pydantic_v1.Field() + name: str = pydantic.Field() """ The name of the form field """ - data: str = pydantic_v1.Field() + data: str = pydantic.Field() """ The data for the form field. """ - encoding: typing.Optional[MultipartFormFieldRequestEncoding] = pydantic_v1.Field() + encoding: typing.Optional[MultipartFormFieldRequestEncoding] = pydantic.Field() """ The encoding of the value of `data`. Defaults to `RAW` if not defined. @@ -40,30 +40,21 @@ class MultipartFormFieldRequest(pydantic_v1.BaseModel): - `GZIP_BASE64` - GZIP_BASE64 """ - file_name: typing.Optional[str] = pydantic_v1.Field() + file_name: typing.Optional[str] = pydantic.Field() """ The file name of the form field, if the field is for a file. """ - content_type: typing.Optional[str] = pydantic_v1.Field() + content_type: typing.Optional[str] = pydantic.Field() """ The MIME type of the file, if the field is for a file. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/note.py b/src/merge/resources/crm/types/note.py index cfdc9f29..a20e3490 100644 --- a/src/merge/resources/crm/types/note.py +++ b/src/merge/resources/crm/types/note.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .note_account import NoteAccount from .note_contact import NoteContact from .note_opportunity import NoteOpportunity @@ -13,7 +14,7 @@ from .remote_field import RemoteField -class Note(pydantic_v1.BaseModel): +class Note(UniversalBaseModel): """ # The Note Object @@ -27,52 +28,52 @@ class Note(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - owner: typing.Optional[NoteOwner] = pydantic_v1.Field() + owner: typing.Optional[NoteOwner] = pydantic.Field() """ The note's owner. """ - content: typing.Optional[str] = pydantic_v1.Field() + content: typing.Optional[str] = pydantic.Field() """ The note's content. """ - contact: typing.Optional[NoteContact] = pydantic_v1.Field() + contact: typing.Optional[NoteContact] = pydantic.Field() """ The note's contact. """ - account: typing.Optional[NoteAccount] = pydantic_v1.Field() + account: typing.Optional[NoteAccount] = pydantic.Field() """ The note's account. """ - opportunity: typing.Optional[NoteOpportunity] = pydantic_v1.Field() + opportunity: typing.Optional[NoteOpportunity] = pydantic.Field() """ The note's opportunity. """ - remote_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_updated_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's lead was updated. """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's lead was created. 
""" @@ -82,20 +83,11 @@ class Note(pydantic_v1.BaseModel): remote_data: typing.Optional[typing.List[RemoteData]] remote_fields: typing.Optional[typing.List[RemoteField]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/note_request.py b/src/merge/resources/crm/types/note_request.py index b93d31ca..d42e3e22 100644 --- a/src/merge/resources/crm/types/note_request.py +++ b/src/merge/resources/crm/types/note_request.py @@ -1,10 +1,10 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .note_request_account import NoteRequestAccount from .note_request_contact import NoteRequestContact from .note_request_opportunity import NoteRequestOpportunity @@ -12,7 +12,7 @@ from .remote_field_request import RemoteFieldRequest -class NoteRequest(pydantic_v1.BaseModel): +class NoteRequest(UniversalBaseModel): """ # The Note Object @@ -25,27 +25,27 @@ class NoteRequest(pydantic_v1.BaseModel): TODO """ - owner: typing.Optional[NoteRequestOwner] = pydantic_v1.Field() + owner: typing.Optional[NoteRequestOwner] = pydantic.Field() """ The note's owner. """ - content: typing.Optional[str] = pydantic_v1.Field() + content: typing.Optional[str] = pydantic.Field() """ The note's content. """ - contact: typing.Optional[NoteRequestContact] = pydantic_v1.Field() + contact: typing.Optional[NoteRequestContact] = pydantic.Field() """ The note's contact. """ - account: typing.Optional[NoteRequestAccount] = pydantic_v1.Field() + account: typing.Optional[NoteRequestAccount] = pydantic.Field() """ The note's account. """ - opportunity: typing.Optional[NoteRequestOpportunity] = pydantic_v1.Field() + opportunity: typing.Optional[NoteRequestOpportunity] = pydantic.Field() """ The note's opportunity. 
""" @@ -54,20 +54,11 @@ class NoteRequest(pydantic_v1.BaseModel): linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] remote_fields: typing.Optional[typing.List[RemoteFieldRequest]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/note_response.py b/src/merge/resources/crm/types/note_response.py index 42d03987..44073d85 100644 --- a/src/merge/resources/crm/types/note_response.py +++ b/src/merge/resources/crm/types/note_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .note import Note from .warning_validation_problem import WarningValidationProblem -class NoteResponse(pydantic_v1.BaseModel): +class NoteResponse(UniversalBaseModel): model: Note warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/object_class_description_request.py b/src/merge/resources/crm/types/object_class_description_request.py index 903a4803..fc7a6237 100644 --- a/src/merge/resources/crm/types/object_class_description_request.py +++ b/src/merge/resources/crm/types/object_class_description_request.py @@ -1,31 +1,22 @@ # This 
file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .origin_type_enum import OriginTypeEnum -class ObjectClassDescriptionRequest(pydantic_v1.BaseModel): +class ObjectClassDescriptionRequest(UniversalBaseModel): id: str origin_type: OriginTypeEnum - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/opportunity.py b/src/merge/resources/crm/types/opportunity.py index 0b3bca96..c2946e29 100644 --- a/src/merge/resources/crm/types/opportunity.py +++ b/src/merge/resources/crm/types/opportunity.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .opportunity_account import OpportunityAccount from .opportunity_owner import OpportunityOwner from .opportunity_stage import OpportunityStage @@ -13,7 +14,7 @@ from .remote_field import RemoteField -class Opportunity(pydantic_v1.BaseModel): +class Opportunity(UniversalBaseModel): """ # The Opportunity Object @@ -27,52 +28,52 @@ class Opportunity(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The opportunity's name. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The opportunity's description. """ - amount: typing.Optional[int] = pydantic_v1.Field() + amount: typing.Optional[int] = pydantic.Field() """ The opportunity's amount. 
""" - owner: typing.Optional[OpportunityOwner] = pydantic_v1.Field() + owner: typing.Optional[OpportunityOwner] = pydantic.Field() """ The opportunity's owner. """ - account: typing.Optional[OpportunityAccount] = pydantic_v1.Field() + account: typing.Optional[OpportunityAccount] = pydantic.Field() """ The account of the opportunity. """ - stage: typing.Optional[OpportunityStage] = pydantic_v1.Field() + stage: typing.Optional[OpportunityStage] = pydantic.Field() """ The stage of the opportunity. """ - status: typing.Optional[OpportunityStatus] = pydantic_v1.Field() + status: typing.Optional[OpportunityStatus] = pydantic.Field() """ The opportunity's status. @@ -81,17 +82,17 @@ class Opportunity(pydantic_v1.BaseModel): - `LOST` - LOST """ - last_activity_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + last_activity_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the opportunity's last activity occurred. """ - close_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + close_date: typing.Optional[dt.datetime] = pydantic.Field() """ When the opportunity was closed. """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's opportunity was created. """ @@ -101,20 +102,11 @@ class Opportunity(pydantic_v1.BaseModel): remote_data: typing.Optional[typing.List[RemoteData]] remote_fields: typing.Optional[typing.List[RemoteField]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/opportunity_request.py b/src/merge/resources/crm/types/opportunity_request.py index e3344855..579e1f51 100644 --- a/src/merge/resources/crm/types/opportunity_request.py +++ b/src/merge/resources/crm/types/opportunity_request.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .opportunity_request_account import OpportunityRequestAccount from .opportunity_request_owner import OpportunityRequestOwner from .opportunity_request_stage import OpportunityRequestStage @@ -12,7 +13,7 @@ from .remote_field_request import RemoteFieldRequest -class OpportunityRequest(pydantic_v1.BaseModel): +class OpportunityRequest(UniversalBaseModel): """ # The Opportunity Object @@ -25,37 +26,37 @@ class OpportunityRequest(pydantic_v1.BaseModel): TODO """ - name: typing.Optional[str] = pydantic_v1.Field() + name: 
typing.Optional[str] = pydantic.Field() """ The opportunity's name. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The opportunity's description. """ - amount: typing.Optional[int] = pydantic_v1.Field() + amount: typing.Optional[int] = pydantic.Field() """ The opportunity's amount. """ - owner: typing.Optional[OpportunityRequestOwner] = pydantic_v1.Field() + owner: typing.Optional[OpportunityRequestOwner] = pydantic.Field() """ The opportunity's owner. """ - account: typing.Optional[OpportunityRequestAccount] = pydantic_v1.Field() + account: typing.Optional[OpportunityRequestAccount] = pydantic.Field() """ The account of the opportunity. """ - stage: typing.Optional[OpportunityRequestStage] = pydantic_v1.Field() + stage: typing.Optional[OpportunityRequestStage] = pydantic.Field() """ The stage of the opportunity. """ - status: typing.Optional[OpportunityRequestStatus] = pydantic_v1.Field() + status: typing.Optional[OpportunityRequestStatus] = pydantic.Field() """ The opportunity's status. @@ -64,12 +65,12 @@ class OpportunityRequest(pydantic_v1.BaseModel): - `LOST` - LOST """ - last_activity_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + last_activity_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the opportunity's last activity occurred. """ - close_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + close_date: typing.Optional[dt.datetime] = pydantic.Field() """ When the opportunity was closed. """ @@ -78,20 +79,11 @@ class OpportunityRequest(pydantic_v1.BaseModel): linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] remote_fields: typing.Optional[typing.List[RemoteFieldRequest]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/opportunity_response.py b/src/merge/resources/crm/types/opportunity_response.py index 34d89d2c..01a032f3 100644 --- a/src/merge/resources/crm/types/opportunity_response.py +++ b/src/merge/resources/crm/types/opportunity_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
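Models that carry datetime fields, such as Note and Opportunity above, also lose the json_encoders = {dt.datetime: serialize_datetime} entry from their old Config. The short sketch below only shows Pydantic's own default ISO-8601 output under either major version; how the shared UniversalBaseModel ultimately formats datetimes is defined in src/merge/core and is not reproduced here.

import datetime as dt
import typing

import pydantic

IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")


class Stamped(pydantic.BaseModel):
    created_at: typing.Optional[dt.datetime] = None


stamp = Stamped(created_at=dt.datetime(2024, 7, 24, 11, 46, 4))
if IS_PYDANTIC_V2:
    print(stamp.model_dump(mode="json"))  # {'created_at': '2024-07-24T11:46:04'}
else:
    print(stamp.json())  # {"created_at": "2024-07-24T11:46:04"}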
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .opportunity import Opportunity from .warning_validation_problem import WarningValidationProblem -class OpportunityResponse(pydantic_v1.BaseModel): +class OpportunityResponse(UniversalBaseModel): model: Opportunity warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/paginated_account_details_and_actions_list.py b/src/merge/resources/crm/types/paginated_account_details_and_actions_list.py index 280100c4..07323330 100644 --- a/src/merge/resources/crm/types/paginated_account_details_and_actions_list.py +++ b/src/merge/resources/crm/types/paginated_account_details_and_actions_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_details_and_actions import AccountDetailsAndActions -class PaginatedAccountDetailsAndActionsList(pydantic_v1.BaseModel): +class PaginatedAccountDetailsAndActionsList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[AccountDetailsAndActions]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/paginated_account_list.py b/src/merge/resources/crm/types/paginated_account_list.py index 21d2cda1..d9be284b 100644 --- a/src/merge/resources/crm/types/paginated_account_list.py +++ b/src/merge/resources/crm/types/paginated_account_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
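From this point the patch applies the same treatment to the Paginated*List models, all of which expose the same next/previous/results cursor shape. As an illustration only, a generic walker over that shape could look like the sketch below; the fetch_page callable is hypothetical, since the real client's list-method signatures are not shown in these hunks.

import typing

T = typing.TypeVar("T")


class PageLike(typing.Protocol[T]):
    # Structural view of the generated Paginated*List models.
    next: typing.Optional[str]
    previous: typing.Optional[str]
    results: typing.Optional[typing.List[T]]


def iter_all(
    first_page: PageLike[T],
    fetch_page: typing.Callable[[str], PageLike[T]],  # hypothetical cursor fetcher
) -> typing.Iterator[T]:
    page: typing.Optional[PageLike[T]] = first_page
    while page is not None:
        for item in page.results or []:
            yield item
        page = fetch_page(page.next) if page.next else None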
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account import Account -class PaginatedAccountList(pydantic_v1.BaseModel): +class PaginatedAccountList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Account]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/paginated_association_list.py b/src/merge/resources/crm/types/paginated_association_list.py index fe6dda0f..594c319c 100644 --- a/src/merge/resources/crm/types/paginated_association_list.py +++ b/src/merge/resources/crm/types/paginated_association_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .association import Association -class PaginatedAssociationList(pydantic_v1.BaseModel): +class PaginatedAssociationList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Association]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/paginated_association_type_list.py b/src/merge/resources/crm/types/paginated_association_type_list.py index 58f2b1bf..f2180c0a 100644 --- a/src/merge/resources/crm/types/paginated_association_type_list.py +++ b/src/merge/resources/crm/types/paginated_association_type_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .association_type import AssociationType -class PaginatedAssociationTypeList(pydantic_v1.BaseModel): +class PaginatedAssociationTypeList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[AssociationType]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/paginated_audit_log_event_list.py b/src/merge/resources/crm/types/paginated_audit_log_event_list.py index 1d4154d2..e5e04fa7 100644 --- a/src/merge/resources/crm/types/paginated_audit_log_event_list.py +++ b/src/merge/resources/crm/types/paginated_audit_log_event_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .audit_log_event import AuditLogEvent -class PaginatedAuditLogEventList(pydantic_v1.BaseModel): +class PaginatedAuditLogEventList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[AuditLogEvent]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/paginated_contact_list.py b/src/merge/resources/crm/types/paginated_contact_list.py index dbe7c925..5e9c3fb7 100644 --- a/src/merge/resources/crm/types/paginated_contact_list.py +++ b/src/merge/resources/crm/types/paginated_contact_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .contact import Contact -class PaginatedContactList(pydantic_v1.BaseModel): +class PaginatedContactList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Contact]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/paginated_custom_object_class_list.py b/src/merge/resources/crm/types/paginated_custom_object_class_list.py index adff716b..b17aca09 100644 --- a/src/merge/resources/crm/types/paginated_custom_object_class_list.py +++ b/src/merge/resources/crm/types/paginated_custom_object_class_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .custom_object_class import CustomObjectClass -class PaginatedCustomObjectClassList(pydantic_v1.BaseModel): +class PaginatedCustomObjectClassList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[CustomObjectClass]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/paginated_custom_object_list.py b/src/merge/resources/crm/types/paginated_custom_object_list.py index 565b35f3..e7eb25de 100644 --- a/src/merge/resources/crm/types/paginated_custom_object_list.py +++ b/src/merge/resources/crm/types/paginated_custom_object_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .custom_object import CustomObject -class PaginatedCustomObjectList(pydantic_v1.BaseModel): +class PaginatedCustomObjectList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[CustomObject]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/paginated_engagement_list.py b/src/merge/resources/crm/types/paginated_engagement_list.py index 45ec1b90..20c2e59e 100644 --- a/src/merge/resources/crm/types/paginated_engagement_list.py +++ b/src/merge/resources/crm/types/paginated_engagement_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .engagement import Engagement -class PaginatedEngagementList(pydantic_v1.BaseModel): +class PaginatedEngagementList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Engagement]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/paginated_engagement_type_list.py b/src/merge/resources/crm/types/paginated_engagement_type_list.py index 6748c9f0..006b9058 100644 --- a/src/merge/resources/crm/types/paginated_engagement_type_list.py +++ b/src/merge/resources/crm/types/paginated_engagement_type_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .engagement_type import EngagementType -class PaginatedEngagementTypeList(pydantic_v1.BaseModel): +class PaginatedEngagementTypeList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[EngagementType]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/paginated_issue_list.py b/src/merge/resources/crm/types/paginated_issue_list.py index 1016e29a..da8437f1 100644 --- a/src/merge/resources/crm/types/paginated_issue_list.py +++ b/src/merge/resources/crm/types/paginated_issue_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .issue import Issue -class PaginatedIssueList(pydantic_v1.BaseModel): +class PaginatedIssueList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Issue]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/paginated_lead_list.py b/src/merge/resources/crm/types/paginated_lead_list.py index 9db391f4..241421e8 100644 --- a/src/merge/resources/crm/types/paginated_lead_list.py +++ b/src/merge/resources/crm/types/paginated_lead_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .lead import Lead -class PaginatedLeadList(pydantic_v1.BaseModel): +class PaginatedLeadList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Lead]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/paginated_note_list.py b/src/merge/resources/crm/types/paginated_note_list.py index b223ba28..d6aaa849 100644 --- a/src/merge/resources/crm/types/paginated_note_list.py +++ b/src/merge/resources/crm/types/paginated_note_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .note import Note -class PaginatedNoteList(pydantic_v1.BaseModel): +class PaginatedNoteList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Note]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/paginated_opportunity_list.py b/src/merge/resources/crm/types/paginated_opportunity_list.py index 1dd24e63..3bffc338 100644 --- a/src/merge/resources/crm/types/paginated_opportunity_list.py +++ b/src/merge/resources/crm/types/paginated_opportunity_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .opportunity import Opportunity -class PaginatedOpportunityList(pydantic_v1.BaseModel): +class PaginatedOpportunityList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Opportunity]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/paginated_remote_field_class_list.py b/src/merge/resources/crm/types/paginated_remote_field_class_list.py index 9105daee..bf0a4fd5 100644 --- a/src/merge/resources/crm/types/paginated_remote_field_class_list.py +++ b/src/merge/resources/crm/types/paginated_remote_field_class_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_field_class import RemoteFieldClass -class PaginatedRemoteFieldClassList(pydantic_v1.BaseModel): +class PaginatedRemoteFieldClassList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[RemoteFieldClass]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/paginated_stage_list.py b/src/merge/resources/crm/types/paginated_stage_list.py index 66cb59cc..40bcdb6d 100644 --- a/src/merge/resources/crm/types/paginated_stage_list.py +++ b/src/merge/resources/crm/types/paginated_stage_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .stage import Stage -class PaginatedStageList(pydantic_v1.BaseModel): +class PaginatedStageList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Stage]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/paginated_sync_status_list.py b/src/merge/resources/crm/types/paginated_sync_status_list.py index 6c88197e..7faca80c 100644 --- a/src/merge/resources/crm/types/paginated_sync_status_list.py +++ b/src/merge/resources/crm/types/paginated_sync_status_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .sync_status import SyncStatus -class PaginatedSyncStatusList(pydantic_v1.BaseModel): +class PaginatedSyncStatusList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[SyncStatus]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/paginated_task_list.py b/src/merge/resources/crm/types/paginated_task_list.py index 2388e837..f8ea4a12 100644 --- a/src/merge/resources/crm/types/paginated_task_list.py +++ b/src/merge/resources/crm/types/paginated_task_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .task import Task -class PaginatedTaskList(pydantic_v1.BaseModel): +class PaginatedTaskList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Task]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/paginated_user_list.py b/src/merge/resources/crm/types/paginated_user_list.py index 6faaf352..d3e02f65 100644 --- a/src/merge/resources/crm/types/paginated_user_list.py +++ b/src/merge/resources/crm/types/paginated_user_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .user import User -class PaginatedUserList(pydantic_v1.BaseModel): +class PaginatedUserList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[User]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/patched_account_request.py b/src/merge/resources/crm/types/patched_account_request.py index 92a1e7d0..d7026afc 100644 --- a/src/merge/resources/crm/types/patched_account_request.py +++ b/src/merge/resources/crm/types/patched_account_request.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .address_request import AddressRequest from .remote_field_request import RemoteFieldRequest -class PatchedAccountRequest(pydantic_v1.BaseModel): +class PatchedAccountRequest(UniversalBaseModel): """ # The Account Object @@ -22,38 +23,38 @@ class PatchedAccountRequest(pydantic_v1.BaseModel): TODO """ - owner: typing.Optional[str] = pydantic_v1.Field() + owner: typing.Optional[str] = pydantic.Field() """ The account's owner. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The account's name. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The account's description. """ - industry: typing.Optional[str] = pydantic_v1.Field() + industry: typing.Optional[str] = pydantic.Field() """ The account's industry. """ - website: typing.Optional[str] = pydantic_v1.Field() + website: typing.Optional[str] = pydantic.Field() """ The account's website. """ - number_of_employees: typing.Optional[int] = pydantic_v1.Field() + number_of_employees: typing.Optional[int] = pydantic.Field() """ The account's number of employees. """ addresses: typing.Optional[typing.List[AddressRequest]] - last_activity_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + last_activity_at: typing.Optional[dt.datetime] = pydantic.Field() """ The last date (either most recent or furthest in the future) of when an activity occurs in an account. 
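As a usage sketch, the regenerated paginated models behave like ordinary pydantic models on either major version. The import path below assumes the usual Fern re-export from merge.resources.crm.types, and the payload values are made up.

# Hypothetical payload; field names match PaginatedNoteList, values are illustrative.
from merge.resources.crm.types import PaginatedNoteList  # assumed re-export path

payload = {
    "next": "cursor-123",
    "previous": None,
    "results": [],
    "unexpected_field": "kept rather than rejected, because extra='allow'",
}

page = PaginatedNoteList(**payload)
print(page.next)  # "cursor-123"
# frozen=True makes instances immutable on both pydantic majors, so this would raise:
# page.next = "cursor-456"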
""" @@ -62,20 +63,11 @@ class PatchedAccountRequest(pydantic_v1.BaseModel): linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] remote_fields: typing.Optional[typing.List[RemoteFieldRequest]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/patched_contact_request.py b/src/merge/resources/crm/types/patched_contact_request.py index c610d959..80c32cda 100644 --- a/src/merge/resources/crm/types/patched_contact_request.py +++ b/src/merge/resources/crm/types/patched_contact_request.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .address_request import AddressRequest from .email_address_request import EmailAddressRequest from .patched_contact_request_owner import PatchedContactRequestOwner @@ -12,7 +13,7 @@ from .remote_field_request import RemoteFieldRequest -class PatchedContactRequest(pydantic_v1.BaseModel): +class PatchedContactRequest(UniversalBaseModel): """ # The Contact Object @@ -25,22 +26,22 @@ class PatchedContactRequest(pydantic_v1.BaseModel): TODO """ - first_name: typing.Optional[str] = pydantic_v1.Field() + first_name: typing.Optional[str] = pydantic.Field() """ The contact's first name. """ - last_name: typing.Optional[str] = pydantic_v1.Field() + last_name: typing.Optional[str] = pydantic.Field() """ The contact's last name. """ - account: typing.Optional[str] = pydantic_v1.Field() + account: typing.Optional[str] = pydantic.Field() """ The contact's account. """ - owner: typing.Optional[PatchedContactRequestOwner] = pydantic_v1.Field() + owner: typing.Optional[PatchedContactRequestOwner] = pydantic.Field() """ The contact's owner. """ @@ -48,7 +49,7 @@ class PatchedContactRequest(pydantic_v1.BaseModel): addresses: typing.Optional[typing.List[AddressRequest]] email_addresses: typing.Optional[typing.List[EmailAddressRequest]] phone_numbers: typing.Optional[typing.List[PhoneNumberRequest]] - last_activity_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + last_activity_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the contact's last activity occurred. 
""" @@ -57,20 +58,11 @@ class PatchedContactRequest(pydantic_v1.BaseModel): linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] remote_fields: typing.Optional[typing.List[RemoteFieldRequest]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/patched_engagement_request.py b/src/merge/resources/crm/types/patched_engagement_request.py index f06710a9..23faeb3a 100644 --- a/src/merge/resources/crm/types/patched_engagement_request.py +++ b/src/merge/resources/crm/types/patched_engagement_request.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .patched_engagement_request_direction import PatchedEngagementRequestDirection from .remote_field_request import RemoteFieldRequest -class PatchedEngagementRequest(pydantic_v1.BaseModel): +class PatchedEngagementRequest(UniversalBaseModel): """ # The Engagement Object @@ -22,22 +23,22 @@ class PatchedEngagementRequest(pydantic_v1.BaseModel): TODO """ - owner: typing.Optional[str] = pydantic_v1.Field() + owner: typing.Optional[str] = pydantic.Field() """ The engagement's owner. """ - content: typing.Optional[str] = pydantic_v1.Field() + content: typing.Optional[str] = pydantic.Field() """ The engagement's content. """ - subject: typing.Optional[str] = pydantic_v1.Field() + subject: typing.Optional[str] = pydantic.Field() """ The engagement's subject. """ - direction: typing.Optional[PatchedEngagementRequestDirection] = pydantic_v1.Field() + direction: typing.Optional[PatchedEngagementRequestDirection] = pydantic.Field() """ The engagement's direction. @@ -45,22 +46,22 @@ class PatchedEngagementRequest(pydantic_v1.BaseModel): - `OUTBOUND` - OUTBOUND """ - engagement_type: typing.Optional[str] = pydantic_v1.Field() + engagement_type: typing.Optional[str] = pydantic.Field() """ The engagement type of the engagement. """ - start_time: typing.Optional[dt.datetime] = pydantic_v1.Field() + start_time: typing.Optional[dt.datetime] = pydantic.Field() """ The time at which the engagement started. """ - end_time: typing.Optional[dt.datetime] = pydantic_v1.Field() + end_time: typing.Optional[dt.datetime] = pydantic.Field() """ The time at which the engagement ended. """ - account: typing.Optional[str] = pydantic_v1.Field() + account: typing.Optional[str] = pydantic.Field() """ The account of the engagement. 
""" @@ -70,20 +71,11 @@ class PatchedEngagementRequest(pydantic_v1.BaseModel): linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] remote_fields: typing.Optional[typing.List[RemoteFieldRequest]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/patched_opportunity_request.py b/src/merge/resources/crm/types/patched_opportunity_request.py index 5b357bd2..18a4615c 100644 --- a/src/merge/resources/crm/types/patched_opportunity_request.py +++ b/src/merge/resources/crm/types/patched_opportunity_request.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .patched_opportunity_request_status import PatchedOpportunityRequestStatus from .remote_field_request import RemoteFieldRequest -class PatchedOpportunityRequest(pydantic_v1.BaseModel): +class PatchedOpportunityRequest(UniversalBaseModel): """ # The Opportunity Object @@ -22,37 +23,37 @@ class PatchedOpportunityRequest(pydantic_v1.BaseModel): TODO """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The opportunity's name. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The opportunity's description. """ - amount: typing.Optional[int] = pydantic_v1.Field() + amount: typing.Optional[int] = pydantic.Field() """ The opportunity's amount. """ - owner: typing.Optional[str] = pydantic_v1.Field() + owner: typing.Optional[str] = pydantic.Field() """ The opportunity's owner. """ - account: typing.Optional[str] = pydantic_v1.Field() + account: typing.Optional[str] = pydantic.Field() """ The account of the opportunity. """ - stage: typing.Optional[str] = pydantic_v1.Field() + stage: typing.Optional[str] = pydantic.Field() """ The stage of the opportunity. """ - status: typing.Optional[PatchedOpportunityRequestStatus] = pydantic_v1.Field() + status: typing.Optional[PatchedOpportunityRequestStatus] = pydantic.Field() """ The opportunity's status. @@ -61,12 +62,12 @@ class PatchedOpportunityRequest(pydantic_v1.BaseModel): - `LOST` - LOST """ - last_activity_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + last_activity_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the opportunity's last activity occurred. 
""" - close_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + close_date: typing.Optional[dt.datetime] = pydantic.Field() """ When the opportunity was closed. """ @@ -75,20 +76,11 @@ class PatchedOpportunityRequest(pydantic_v1.BaseModel): linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] remote_fields: typing.Optional[typing.List[RemoteFieldRequest]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/patched_task_request.py b/src/merge/resources/crm/types/patched_task_request.py index ade5b200..eede4487 100644 --- a/src/merge/resources/crm/types/patched_task_request.py +++ b/src/merge/resources/crm/types/patched_task_request.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .patched_task_request_status import PatchedTaskRequestStatus from .remote_field_request import RemoteFieldRequest -class PatchedTaskRequest(pydantic_v1.BaseModel): +class PatchedTaskRequest(UniversalBaseModel): """ # The Task Object @@ -22,42 +23,42 @@ class PatchedTaskRequest(pydantic_v1.BaseModel): TODO """ - subject: typing.Optional[str] = pydantic_v1.Field() + subject: typing.Optional[str] = pydantic.Field() """ The task's subject. """ - content: typing.Optional[str] = pydantic_v1.Field() + content: typing.Optional[str] = pydantic.Field() """ The task's content. """ - owner: typing.Optional[str] = pydantic_v1.Field() + owner: typing.Optional[str] = pydantic.Field() """ The task's owner. """ - account: typing.Optional[str] = pydantic_v1.Field() + account: typing.Optional[str] = pydantic.Field() """ The task's account. """ - opportunity: typing.Optional[str] = pydantic_v1.Field() + opportunity: typing.Optional[str] = pydantic.Field() """ The task's opportunity. """ - completed_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + completed_date: typing.Optional[dt.datetime] = pydantic.Field() """ When the task is completed. """ - due_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + due_date: typing.Optional[dt.datetime] = pydantic.Field() """ When the task is due. """ - status: typing.Optional[PatchedTaskRequestStatus] = pydantic_v1.Field() + status: typing.Optional[PatchedTaskRequestStatus] = pydantic.Field() """ The task's status. 
@@ -69,20 +70,11 @@ class PatchedTaskRequest(pydantic_v1.BaseModel): linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] remote_fields: typing.Optional[typing.List[RemoteFieldRequest]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/phone_number.py b/src/merge/resources/crm/types/phone_number.py index 30be246c..fd9d60ea 100644 --- a/src/merge/resources/crm/types/phone_number.py +++ b/src/merge/resources/crm/types/phone_number.py @@ -3,11 +3,12 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class PhoneNumber(pydantic_v1.BaseModel): + +class PhoneNumber(UniversalBaseModel): """ # The PhoneNumber Object @@ -20,40 +21,31 @@ class PhoneNumber(pydantic_v1.BaseModel): Fetch from the `GET Contact` endpoint and view their phone numbers. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - phone_number: typing.Optional[str] = pydantic_v1.Field() + phone_number: typing.Optional[str] = pydantic.Field() """ The phone number. """ - phone_number_type: typing.Optional[str] = pydantic_v1.Field() + phone_number_type: typing.Optional[str] = pydantic.Field() """ The phone number's type. 
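The json()/dict() overrides being deleted in these Patched*Request hunks existed to serialize partial updates with by_alias plus exclude_unset/exclude_none defaults. The standalone model below (a stand-in, not the SDK's PatchedTaskRequest) shows why exclude_unset matters for a PATCH body: only fields the caller explicitly set should be sent.

# Stand-in model with the same Optional-field shape as the generated Patched*Request types.
import typing

import pydantic


class PatchedTaskDemo(pydantic.BaseModel):
    subject: typing.Optional[str] = None
    content: typing.Optional[str] = None
    owner: typing.Optional[str] = None


patch_body = PatchedTaskDemo(subject="Follow up with the prospect")
# Only explicitly-set fields survive, which is what a PATCH endpoint expects.
print(patch_body.dict(exclude_unset=True))  # {'subject': 'Follow up with the prospect'}
# On pydantic v2 the spelling is patch_body.model_dump(exclude_unset=True).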
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/phone_number_request.py b/src/merge/resources/crm/types/phone_number_request.py index 1569e5be..ad089b62 100644 --- a/src/merge/resources/crm/types/phone_number_request.py +++ b/src/merge/resources/crm/types/phone_number_request.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class PhoneNumberRequest(pydantic_v1.BaseModel): + +class PhoneNumberRequest(UniversalBaseModel): """ # The PhoneNumber Object @@ -20,12 +20,12 @@ class PhoneNumberRequest(pydantic_v1.BaseModel): Fetch from the `GET Contact` endpoint and view their phone numbers. """ - phone_number: typing.Optional[str] = pydantic_v1.Field() + phone_number: typing.Optional[str] = pydantic.Field() """ The phone number. """ - phone_number_type: typing.Optional[str] = pydantic_v1.Field() + phone_number_type: typing.Optional[str] = pydantic.Field() """ The phone number's type. 
""" @@ -33,20 +33,11 @@ class PhoneNumberRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/remote_data.py b/src/merge/resources/crm/types/remote_data.py index 098f551b..d50bfca2 100644 --- a/src/merge/resources/crm/types/remote_data.py +++ b/src/merge/resources/crm/types/remote_data.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class RemoteData(pydantic_v1.BaseModel): + +class RemoteData(UniversalBaseModel): path: str data: typing.Optional[typing.Any] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/remote_endpoint_info.py b/src/merge/resources/crm/types/remote_endpoint_info.py index da6037bc..9f627cae 100644 --- a/src/merge/resources/crm/types/remote_endpoint_info.py +++ b/src/merge/resources/crm/types/remote_endpoint_info.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class RemoteEndpointInfo(pydantic_v1.BaseModel): + +class RemoteEndpointInfo(UniversalBaseModel): method: str url_path: str field_traversal_path: typing.List[typing.Any] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/remote_field.py b/src/merge/resources/crm/types/remote_field.py index 5b802740..a727034a 100644 --- a/src/merge/resources/crm/types/remote_field.py +++ b/src/merge/resources/crm/types/remote_field.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_field_remote_field_class import RemoteFieldRemoteFieldClass -class RemoteField(pydantic_v1.BaseModel): +class RemoteField(UniversalBaseModel): remote_field_class: RemoteFieldRemoteFieldClass value: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/remote_field_api.py b/src/merge/resources/crm/types/remote_field_api.py index c2a16698..1d1efb6a 100644 --- a/src/merge/resources/crm/types/remote_field_api.py +++ b/src/merge/resources/crm/types/remote_field_api.py @@ -1,39 +1,28 @@ # This 
file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .advanced_metadata import AdvancedMetadata from .remote_endpoint_info import RemoteEndpointInfo from .remote_field_api_coverage import RemoteFieldApiCoverage -class RemoteFieldApi(pydantic_v1.BaseModel): - schema_: typing.Dict[str, typing.Any] = pydantic_v1.Field(alias="schema") +class RemoteFieldApi(UniversalBaseModel): + schema_: typing.Dict[str, typing.Any] = pydantic.Field(alias="schema") remote_key_name: str remote_endpoint_info: RemoteEndpointInfo example_values: typing.List[typing.Any] advanced_metadata: typing.Optional[AdvancedMetadata] coverage: typing.Optional[RemoteFieldApiCoverage] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/remote_field_api_response.py b/src/merge/resources/crm/types/remote_field_api_response.py index ab188888..d6f75715 100644 --- a/src/merge/resources/crm/types/remote_field_api_response.py +++ b/src/merge/resources/crm/types/remote_field_api_response.py @@ -1,40 +1,29 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .remote_field_api import RemoteFieldApi - - -class RemoteFieldApiResponse(pydantic_v1.BaseModel): - account: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Account") - contact: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Contact") - lead: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Lead") - note: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Note") - opportunity: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Opportunity") - stage: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Stage") - user: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="User") - task: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Task") - engagement: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Engagement") +import pydantic - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .remote_field_api import RemoteFieldApi - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} +class RemoteFieldApiResponse(UniversalBaseModel): + account: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Account") + contact: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Contact") + lead: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Lead") + note: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Note") + opportunity: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Opportunity") + stage: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Stage") + user: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="User") + task: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Task") + engagement: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Engagement") + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/remote_field_class.py b/src/merge/resources/crm/types/remote_field_class.py index d8f7a788..7dd62e6b 100644 --- a/src/merge/resources/crm/types/remote_field_class.py +++ b/src/merge/resources/crm/types/remote_field_class.py @@ -1,17 +1,17 @@ # This file was auto-generated by Fern from our API Definition. 
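RemoteFieldApi and RemoteFieldApiResponse keep their pydantic.Field aliases through the migration: the JSON key "schema" maps to the attribute schema_, and the response's capitalized keys ("Account", "Contact", ...) map to lowercase attributes. A minimal standalone sketch of that alias behaviour (not the SDK models themselves):

import typing

import pydantic


class AliasDemo(pydantic.BaseModel):
    # Same pattern as RemoteFieldApi.schema_ and RemoteFieldApiResponse.account:
    schema_: typing.Dict[str, typing.Any] = pydantic.Field(alias="schema")
    account: typing.Optional[str] = pydantic.Field(default=None, alias="Account")


demo = AliasDemo(**{"schema": {"type": "string"}, "Account": "acct-1"})  # populate by alias
print(demo.schema_)  # {'type': 'string'}
print(demo.account)  # 'acct-1'
# Dumping with by_alias=True restores the wire names "schema" and "Account".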
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .item_schema import ItemSchema from .remote_field_class_field_choices_item import RemoteFieldClassFieldChoicesItem from .remote_field_class_field_format import RemoteFieldClassFieldFormat from .remote_field_class_field_type import RemoteFieldClassFieldType -class RemoteFieldClass(pydantic_v1.BaseModel): +class RemoteFieldClass(UniversalBaseModel): id: typing.Optional[str] display_name: typing.Optional[str] remote_key_name: typing.Optional[str] @@ -23,20 +23,11 @@ class RemoteFieldClass(pydantic_v1.BaseModel): field_choices: typing.Optional[typing.List[RemoteFieldClassFieldChoicesItem]] item_schema: typing.Optional[ItemSchema] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/remote_field_class_field_choices_item.py b/src/merge/resources/crm/types/remote_field_class_field_choices_item.py index 19926fa0..4b011ac4 100644 --- a/src/merge/resources/crm/types/remote_field_class_field_choices_item.py +++ b/src/merge/resources/crm/types/remote_field_class_field_choices_item.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class RemoteFieldClassFieldChoicesItem(pydantic_v1.BaseModel): + +class RemoteFieldClassFieldChoicesItem(UniversalBaseModel): value: typing.Optional[typing.Any] display_name: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/remote_field_class_for_custom_object_class.py b/src/merge/resources/crm/types/remote_field_class_for_custom_object_class.py index 230ed9e1..44d3c510 100644 --- a/src/merge/resources/crm/types/remote_field_class_for_custom_object_class.py +++ b/src/merge/resources/crm/types/remote_field_class_for_custom_object_class.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_field_class_for_custom_object_class_field_choices_item import ( RemoteFieldClassForCustomObjectClassFieldChoicesItem, ) @@ -13,13 +14,13 @@ from .remote_field_class_for_custom_object_class_item_schema import RemoteFieldClassForCustomObjectClassItemSchema -class RemoteFieldClassForCustomObjectClass(pydantic_v1.BaseModel): - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() +class RemoteFieldClassForCustomObjectClass(UniversalBaseModel): + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. 
""" @@ -33,20 +34,11 @@ class RemoteFieldClassForCustomObjectClass(pydantic_v1.BaseModel): field_choices: typing.Optional[typing.List[RemoteFieldClassForCustomObjectClassFieldChoicesItem]] item_schema: typing.Optional[RemoteFieldClassForCustomObjectClassItemSchema] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/remote_field_class_for_custom_object_class_field_choices_item.py b/src/merge/resources/crm/types/remote_field_class_for_custom_object_class_field_choices_item.py index fa4b5978..5665e0b2 100644 --- a/src/merge/resources/crm/types/remote_field_class_for_custom_object_class_field_choices_item.py +++ b/src/merge/resources/crm/types/remote_field_class_for_custom_object_class_field_choices_item.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class RemoteFieldClassForCustomObjectClassFieldChoicesItem(pydantic_v1.BaseModel): + +class RemoteFieldClassForCustomObjectClassFieldChoicesItem(UniversalBaseModel): value: typing.Optional[typing.Any] display_name: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/remote_field_class_for_custom_object_class_item_schema.py b/src/merge/resources/crm/types/remote_field_class_for_custom_object_class_item_schema.py index f7305515..a24ed7ce 100644 --- 
a/src/merge/resources/crm/types/remote_field_class_for_custom_object_class_item_schema.py +++ b/src/merge/resources/crm/types/remote_field_class_for_custom_object_class_item_schema.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class RemoteFieldClassForCustomObjectClassItemSchema(pydantic_v1.BaseModel): + +class RemoteFieldClassForCustomObjectClassItemSchema(UniversalBaseModel): item_type: typing.Optional[str] item_format: typing.Optional[str] item_choices: typing.Optional[typing.List[typing.Optional[str]]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/remote_field_request.py b/src/merge/resources/crm/types/remote_field_request.py index 84d08873..46af4bfc 100644 --- a/src/merge/resources/crm/types/remote_field_request.py +++ b/src/merge/resources/crm/types/remote_field_request.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_field_request_remote_field_class import RemoteFieldRequestRemoteFieldClass -class RemoteFieldRequest(pydantic_v1.BaseModel): +class RemoteFieldRequest(UniversalBaseModel): remote_field_class: RemoteFieldRequestRemoteFieldClass value: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/remote_key.py b/src/merge/resources/crm/types/remote_key.py index e0bec368..0ce7d620 100644 --- a/src/merge/resources/crm/types/remote_key.py +++ b/src/merge/resources/crm/types/remote_key.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class RemoteKey(pydantic_v1.BaseModel): + +class RemoteKey(UniversalBaseModel): """ # The RemoteKey Object @@ -23,20 +23,11 @@ class RemoteKey(pydantic_v1.BaseModel): name: str key: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/remote_response.py b/src/merge/resources/crm/types/remote_response.py index f39951ee..5551bafc 100644 --- a/src/merge/resources/crm/types/remote_response.py +++ b/src/merge/resources/crm/types/remote_response.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .response_type_enum import ResponseTypeEnum -class RemoteResponse(pydantic_v1.BaseModel): +class RemoteResponse(UniversalBaseModel): """ # The RemoteResponse Object @@ -29,20 +29,11 @@ class RemoteResponse(pydantic_v1.BaseModel): response_type: typing.Optional[ResponseTypeEnum] headers: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/stage.py b/src/merge/resources/crm/types/stage.py index 16e269e0..650bc4da 100644 --- a/src/merge/resources/crm/types/stage.py +++ b/src/merge/resources/crm/types/stage.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData from .remote_field import RemoteField -class Stage(pydantic_v1.BaseModel): +class Stage(UniversalBaseModel): """ # The Stage Object @@ -23,27 +24,27 @@ class Stage(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The stage's name. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -52,20 +53,11 @@ class Stage(pydantic_v1.BaseModel): remote_data: typing.Optional[typing.List[RemoteData]] remote_fields: typing.Optional[typing.List[RemoteField]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/sync_status.py b/src/merge/resources/crm/types/sync_status.py index c6b7cbc4..03668cbf 100644 --- a/src/merge/resources/crm/types/sync_status.py +++ b/src/merge/resources/crm/types/sync_status.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .selective_sync_configurations_usage_enum import SelectiveSyncConfigurationsUsageEnum from .sync_status_status_enum import SyncStatusStatusEnum -class SyncStatus(pydantic_v1.BaseModel): +class SyncStatus(UniversalBaseModel): """ # The SyncStatus Object @@ -30,20 +31,11 @@ class SyncStatus(pydantic_v1.BaseModel): is_initial_sync: bool selective_sync_configurations_usage: typing.Optional[SelectiveSyncConfigurationsUsageEnum] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/task.py b/src/merge/resources/crm/types/task.py index c07e3f38..0513a652 100644 --- a/src/merge/resources/crm/types/task.py +++ b/src/merge/resources/crm/types/task.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from 
....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData from .remote_field import RemoteField from .task_account import TaskAccount @@ -13,7 +14,7 @@ from .task_status import TaskStatus -class Task(pydantic_v1.BaseModel): +class Task(UniversalBaseModel): """ # The Task Object @@ -27,57 +28,57 @@ class Task(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - subject: typing.Optional[str] = pydantic_v1.Field() + subject: typing.Optional[str] = pydantic.Field() """ The task's subject. """ - content: typing.Optional[str] = pydantic_v1.Field() + content: typing.Optional[str] = pydantic.Field() """ The task's content. """ - owner: typing.Optional[TaskOwner] = pydantic_v1.Field() + owner: typing.Optional[TaskOwner] = pydantic.Field() """ The task's owner. """ - account: typing.Optional[TaskAccount] = pydantic_v1.Field() + account: typing.Optional[TaskAccount] = pydantic.Field() """ The task's account. """ - opportunity: typing.Optional[TaskOpportunity] = pydantic_v1.Field() + opportunity: typing.Optional[TaskOpportunity] = pydantic.Field() """ The task's opportunity. """ - completed_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + completed_date: typing.Optional[dt.datetime] = pydantic.Field() """ When the task is completed. """ - due_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + due_date: typing.Optional[dt.datetime] = pydantic.Field() """ When the task is due. """ - status: typing.Optional[TaskStatus] = pydantic_v1.Field() + status: typing.Optional[TaskStatus] = pydantic.Field() """ The task's status. @@ -85,7 +86,7 @@ class Task(pydantic_v1.BaseModel): - `CLOSED` - CLOSED """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -94,20 +95,11 @@ class Task(pydantic_v1.BaseModel): remote_data: typing.Optional[typing.List[RemoteData]] remote_fields: typing.Optional[typing.List[RemoteField]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/task_request.py b/src/merge/resources/crm/types/task_request.py index 2f532261..b6de1a01 100644 --- a/src/merge/resources/crm/types/task_request.py +++ b/src/merge/resources/crm/types/task_request.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_field_request import RemoteFieldRequest from .task_request_account import TaskRequestAccount from .task_request_opportunity import TaskRequestOpportunity @@ -12,7 +13,7 @@ from .task_request_status import TaskRequestStatus -class TaskRequest(pydantic_v1.BaseModel): +class TaskRequest(UniversalBaseModel): """ # The Task Object @@ -25,42 +26,42 @@ class TaskRequest(pydantic_v1.BaseModel): TODO """ - subject: typing.Optional[str] = pydantic_v1.Field() + subject: typing.Optional[str] = pydantic.Field() """ The task's subject. """ - content: typing.Optional[str] = pydantic_v1.Field() + content: typing.Optional[str] = pydantic.Field() """ The task's content. """ - owner: typing.Optional[TaskRequestOwner] = pydantic_v1.Field() + owner: typing.Optional[TaskRequestOwner] = pydantic.Field() """ The task's owner. """ - account: typing.Optional[TaskRequestAccount] = pydantic_v1.Field() + account: typing.Optional[TaskRequestAccount] = pydantic.Field() """ The task's account. """ - opportunity: typing.Optional[TaskRequestOpportunity] = pydantic_v1.Field() + opportunity: typing.Optional[TaskRequestOpportunity] = pydantic.Field() """ The task's opportunity. """ - completed_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + completed_date: typing.Optional[dt.datetime] = pydantic.Field() """ When the task is completed. """ - due_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + due_date: typing.Optional[dt.datetime] = pydantic.Field() """ When the task is due. """ - status: typing.Optional[TaskRequestStatus] = pydantic_v1.Field() + status: typing.Optional[TaskRequestStatus] = pydantic.Field() """ The task's status. 
@@ -72,20 +73,11 @@ class TaskRequest(pydantic_v1.BaseModel): linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] remote_fields: typing.Optional[typing.List[RemoteFieldRequest]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/task_response.py b/src/merge/resources/crm/types/task_response.py index bd03898e..1374c271 100644 --- a/src/merge/resources/crm/types/task_response.py +++ b/src/merge/resources/crm/types/task_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .task import Task from .warning_validation_problem import WarningValidationProblem -class TaskResponse(pydantic_v1.BaseModel): +class TaskResponse(UniversalBaseModel): model: Task warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/user.py b/src/merge/resources/crm/types/user.py index d0b81245..7c827364 100644 --- a/src/merge/resources/crm/types/user.py +++ b/src/merge/resources/crm/types/user.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities 
import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData from .remote_field import RemoteField -class User(pydantic_v1.BaseModel): +class User(UniversalBaseModel): """ # The User Object @@ -23,37 +24,37 @@ class User(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The user's name. """ - email: typing.Optional[str] = pydantic_v1.Field() + email: typing.Optional[str] = pydantic.Field() """ The user's email address. """ - is_active: typing.Optional[bool] = pydantic_v1.Field() + is_active: typing.Optional[bool] = pydantic.Field() """ Whether or not the user is active. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -62,20 +63,11 @@ class User(pydantic_v1.BaseModel): remote_data: typing.Optional[typing.List[RemoteData]] remote_fields: typing.Optional[typing.List[RemoteField]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/validation_problem_source.py b/src/merge/resources/crm/types/validation_problem_source.py index fde15b40..c65d82ef 100644 --- a/src/merge/resources/crm/types/validation_problem_source.py +++ b/src/merge/resources/crm/types/validation_problem_source.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
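Alongside the base-class change, every type drops the same json()/dict() pair that forced by_alias=True / exclude_unset=True and merged the exclude_unset and exclude_none dumps through deep_union_pydantic_dicts. A minimal, hypothetical usage sketch of how a caller can still request those defaults on either Pydantic major version, assuming the regenerated CRM types are re-exported from merge.resources.crm.types (an import path this patch does not show):

from merge.resources.crm.types import RemoteKey  # assumed re-export path

remote_key = RemoteKey(name="Example Remote Key", key="example-key-value")

if hasattr(remote_key, "model_dump"):
    # Pydantic v2 spelling.
    payload = remote_key.model_dump(by_alias=True, exclude_unset=True)
else:
    # Pydantic v1 spelling, matching the defaults the deleted dict() override applied.
    payload = remote_key.dict(by_alias=True, exclude_unset=True)

print(payload)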
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ValidationProblemSource(pydantic_v1.BaseModel): - pointer: str - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class ValidationProblemSource(UniversalBaseModel): + pointer: str - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/warning_validation_problem.py b/src/merge/resources/crm/types/warning_validation_problem.py index 6baf9600..348d668a 100644 --- a/src/merge/resources/crm/types/warning_validation_problem.py +++ b/src/merge/resources/crm/types/warning_validation_problem.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .validation_problem_source import ValidationProblemSource -class WarningValidationProblem(pydantic_v1.BaseModel): +class WarningValidationProblem(UniversalBaseModel): source: typing.Optional[ValidationProblemSource] title: str detail: str problem_type: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/crm/types/webhook_receiver.py b/src/merge/resources/crm/types/webhook_receiver.py index 0544f256..bb10af95 100644 --- a/src/merge/resources/crm/types/webhook_receiver.py +++ b/src/merge/resources/crm/types/webhook_receiver.py @@ 
-1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class WebhookReceiver(pydantic_v1.BaseModel): + +class WebhookReceiver(UniversalBaseModel): event: str is_active: bool key: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/resources/account_details/client.py b/src/merge/resources/filestorage/resources/account_details/client.py index 2b81889a..857cc544 100644 --- a/src/merge/resources/filestorage/resources/account_details/client.py +++ b/src/merge/resources/filestorage/resources/account_details/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.account_details import AccountDetails @@ -41,9 +41,9 @@ def retrieve(self, *, request_options: typing.Optional[RequestOptions] = None) - _response = self._client_wrapper.httpx_client.request( "filestorage/v1/account-details", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountDetails, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountDetails, parse_obj_as(type_=AccountDetails, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -70,20 +70,28 @@ async def retrieve(self, *, request_options: typing.Optional[RequestOptions] = N Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.account_details.retrieve() + + + async def main() -> None: + await client.filestorage.account_details.retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/account-details", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountDetails, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + 
return typing.cast(AccountDetails, parse_obj_as(type_=AccountDetails, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/account_token/client.py b/src/merge/resources/filestorage/resources/account_token/client.py index 5f949d96..23dd23c0 100644 --- a/src/merge/resources/filestorage/resources/account_token/client.py +++ b/src/merge/resources/filestorage/resources/account_token/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.account_token import AccountToken @@ -48,9 +48,9 @@ def retrieve(self, public_token: str, *, request_options: typing.Optional[Reques method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountToken, parse_obj_as(type_=AccountToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -81,24 +81,32 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.account_token.retrieve( - public_token="public_token", - ) + + + async def main() -> None: + await client.filestorage.account_token.retrieve( + public_token="public_token", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"filestorage/v1/account-token/{jsonable_encoder(public_token)}", method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountToken, parse_obj_as(type_=AccountToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/async_passthrough/client.py b/src/merge/resources/filestorage/resources/async_passthrough/client.py index e6a277b6..a7742176 100644 --- a/src/merge/resources/filestorage/resources/async_passthrough/client.py +++ b/src/merge/resources/filestorage/resources/async_passthrough/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.async_passthrough_reciept import AsyncPassthroughReciept from ...types.data_passthrough_request import DataPassthroughRequest @@ -57,9 +57,9 @@ def create( _response = self._client_wrapper.httpx_client.request( "filestorage/v1/async-passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) 
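The client hunks that accompany the type changes make two further adjustments: each pydantic_v1.parse_obj_as(Model, _response.json()) call becomes typing.cast(Model, parse_obj_as(type_=Model, object_=_response.json())), and the status-code check moves inside the try block so that a non-JSON error body surfaces as an ApiError instead of escaping as a raw JSONDecodeError. The async docstring examples are also rewritten to run their await calls inside an async def main() driven by asyncio.run(), which makes them runnable as shown outside an existing event loop. A version-agnostic parse_obj_as of the shape these call sites imply could look roughly like the following; this is a sketch under that assumption, not the generated implementation:

import typing

import pydantic

T = typing.TypeVar("T")


def parse_obj_as(type_: typing.Type[T], object_: typing.Any) -> T:
    # Sketch: route to TypeAdapter on Pydantic v2, fall back to the v1 helper otherwise.
    if pydantic.VERSION.startswith("2."):
        return pydantic.TypeAdapter(type_).validate_python(object_)
    return pydantic.parse_obj_as(type_, object_)

The typing.cast(...) wrapper at each call site is then purely for static type checkers; at runtime it returns its argument unchanged.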
- if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AsyncPassthroughReciept, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AsyncPassthroughReciept, parse_obj_as(type_=AsyncPassthroughReciept, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -100,9 +100,9 @@ def retrieve( method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -133,6 +133,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.filestorage import DataPassthroughRequest, MethodEnum @@ -140,19 +142,25 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.async_passthrough.create( - request=DataPassthroughRequest( - method=MethodEnum.GET, - path="/scooters", - ), - ) + + + async def main() -> None: + await client.filestorage.async_passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/async-passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AsyncPassthroughReciept, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AsyncPassthroughReciept, parse_obj_as(type_=AsyncPassthroughReciept, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -178,24 +186,32 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.async_passthrough.retrieve( - async_passthrough_receipt_id="async_passthrough_receipt_id", - ) + + + async def main() -> None: + await client.filestorage.async_passthrough.retrieve( + async_passthrough_receipt_id="async_passthrough_receipt_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"filestorage/v1/async-passthrough/{jsonable_encoder(async_passthrough_receipt_id)}", method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/audit_trail/client.py b/src/merge/resources/filestorage/resources/audit_trail/client.py index f9eda949..436275ec 100644 --- a/src/merge/resources/filestorage/resources/audit_trail/client.py +++ 
b/src/merge/resources/filestorage/resources/audit_trail/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_audit_log_event_list import PaginatedAuditLogEventList @@ -79,9 +79,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAuditLogEventList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAuditLogEventList, parse_obj_as(type_=PaginatedAuditLogEventList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -136,13 +136,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.audit_trail.list() + + + async def main() -> None: + await client.filestorage.audit_trail.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/audit-trail", @@ -157,9 +165,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAuditLogEventList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAuditLogEventList, parse_obj_as(type_=PaginatedAuditLogEventList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/available_actions/client.py b/src/merge/resources/filestorage/resources/available_actions/client.py index 9e9210a0..559f5b17 100644 --- a/src/merge/resources/filestorage/resources/available_actions/client.py +++ b/src/merge/resources/filestorage/resources/available_actions/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.available_actions import AvailableActions @@ -41,9 +41,9 @@ def retrieve(self, *, request_options: typing.Optional[RequestOptions] = None) - _response = self._client_wrapper.httpx_client.request( "filestorage/v1/available-actions", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AvailableActions, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AvailableActions, parse_obj_as(type_=AvailableActions, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -70,20 +70,28 @@ async def retrieve(self, *, request_options: typing.Optional[RequestOptions] = N Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await 
client.filestorage.available_actions.retrieve() + + + async def main() -> None: + await client.filestorage.available_actions.retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/available-actions", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AvailableActions, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AvailableActions, parse_obj_as(type_=AvailableActions, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/delete_account/client.py b/src/merge/resources/filestorage/resources/delete_account/client.py index a86fc679..e64ffd53 100644 --- a/src/merge/resources/filestorage/resources/delete_account/client.py +++ b/src/merge/resources/filestorage/resources/delete_account/client.py @@ -38,9 +38,9 @@ def delete(self, *, request_options: typing.Optional[RequestOptions] = None) -> _response = self._client_wrapper.httpx_client.request( "filestorage/v1/delete-account", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -66,20 +66,28 @@ async def delete(self, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.delete_account.delete() + + + async def main() -> None: + await client.filestorage.delete_account.delete() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/delete-account", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/drives/client.py b/src/merge/resources/filestorage/resources/drives/client.py index 3e8a5ca0..795b0baf 100644 --- a/src/merge/resources/filestorage/resources/drives/client.py +++ b/src/merge/resources/filestorage/resources/drives/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.drive import Drive from ...types.paginated_drive_list import PaginatedDriveList @@ -103,9 +103,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedDriveList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedDriveList, parse_obj_as(type_=PaginatedDriveList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, 
body=_response.text) @@ -154,9 +154,9 @@ def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Drive, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Drive, parse_obj_as(type_=Drive, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -227,13 +227,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.drives.list() + + + async def main() -> None: + await client.filestorage.drives.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/drives", @@ -252,9 +260,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedDriveList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedDriveList, parse_obj_as(type_=PaginatedDriveList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -287,15 +295,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.drives.retrieve( - id="id", - ) + + + async def main() -> None: + await client.filestorage.drives.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"filestorage/v1/drives/{jsonable_encoder(id)}", @@ -303,9 +319,9 @@ async def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Drive, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Drive, parse_obj_as(type_=Drive, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/field_mapping/client.py b/src/merge/resources/filestorage/resources/field_mapping/client.py index 1c6ee7b0..b3183a00 100644 --- a/src/merge/resources/filestorage/resources/field_mapping/client.py +++ b/src/merge/resources/filestorage/resources/field_mapping/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.external_target_field_api_response import ExternalTargetFieldApiResponse from ...types.field_mapping_api_instance_response import FieldMappingApiInstanceResponse @@ -50,9 +50,9 @@ def field_mappings_retrieve( _response = self._client_wrapper.httpx_client.request( "filestorage/v1/field-mappings", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(FieldMappingApiInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingApiInstanceResponse, parse_obj_as(type_=FieldMappingApiInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -131,9 +131,9 @@ def field_mappings_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -174,9 +174,9 @@ def field_mappings_destroy( method="DELETE", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -238,9 +238,9 @@ def field_mappings_partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -288,9 +288,9 @@ def remote_fields_retrieve( params={"common_models": common_models, "include_example_values": include_example_values}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteFieldApiResponse, parse_obj_as(type_=RemoteFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -325,9 +325,9 @@ def target_fields_retrieve( _response = self._client_wrapper.httpx_client.request( "filestorage/v1/target-fields", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExternalTargetFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ExternalTargetFieldApiResponse, parse_obj_as(type_=ExternalTargetFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -356,20 +356,28 @@ async def field_mappings_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await 
client.filestorage.field_mapping.field_mappings_retrieve() + + + async def main() -> None: + await client.filestorage.field_mapping.field_mappings_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/field-mappings", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingApiInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingApiInstanceResponse, parse_obj_as(type_=FieldMappingApiInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -419,20 +427,28 @@ async def field_mappings_create( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.field_mapping.field_mappings_create( - target_field_name="example_target_field_name", - target_field_description="this is a example description of the target field", - remote_field_traversal_path=["example_remote_field"], - remote_method="GET", - remote_url_path="/example-url-path", - common_model_name="ExampleCommonModel", - ) + + + async def main() -> None: + await client.filestorage.field_mapping.field_mappings_create( + target_field_name="example_target_field_name", + target_field_description="this is a example description of the target field", + remote_field_traversal_path=["example_remote_field"], + remote_method="GET", + remote_url_path="/example-url-path", + common_model_name="ExampleCommonModel", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/field-mappings", @@ -448,9 +464,9 @@ async def field_mappings_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -476,24 +492,32 @@ async def field_mappings_destroy( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.field_mapping.field_mappings_destroy( - field_mapping_id="field_mapping_id", - ) + + + async def main() -> None: + await client.filestorage.field_mapping.field_mappings_destroy( + field_mapping_id="field_mapping_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"filestorage/v1/field-mappings/{jsonable_encoder(field_mapping_id)}", method="DELETE", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, 
body=_response.text) @@ -534,15 +558,23 @@ async def field_mappings_partial_update( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.field_mapping.field_mappings_partial_update( - field_mapping_id="field_mapping_id", - ) + + + async def main() -> None: + await client.filestorage.field_mapping.field_mappings_partial_update( + field_mapping_id="field_mapping_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"filestorage/v1/field-mappings/{jsonable_encoder(field_mapping_id)}", @@ -555,9 +587,9 @@ async def field_mappings_partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -591,13 +623,21 @@ async def remote_fields_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.field_mapping.remote_fields_retrieve() + + + async def main() -> None: + await client.filestorage.field_mapping.remote_fields_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/remote-fields", @@ -605,9 +645,9 @@ async def remote_fields_retrieve( params={"common_models": common_models, "include_example_values": include_example_values}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteFieldApiResponse, parse_obj_as(type_=RemoteFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -631,20 +671,28 @@ async def target_fields_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.field_mapping.target_fields_retrieve() + + + async def main() -> None: + await client.filestorage.field_mapping.target_fields_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/target-fields", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExternalTargetFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ExternalTargetFieldApiResponse, parse_obj_as(type_=ExternalTargetFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/files/client.py b/src/merge/resources/filestorage/resources/files/client.py index 4b5b2e7b..28a35d5d 100644 --- 
a/src/merge/resources/filestorage/resources/files/client.py +++ b/src/merge/resources/filestorage/resources/files/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.file import File from ...types.file_request import FileRequest @@ -131,9 +131,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedFileList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedFileList, parse_obj_as(type_=PaginatedFileList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -189,9 +189,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FileStorageFileResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FileStorageFileResponse, parse_obj_as(type_=FileStorageFileResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -244,9 +244,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(File, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(File, parse_obj_as(type_=File, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -296,12 +296,12 @@ def download_retrieve( params={"mime_type": mime_type}, request_options=request_options, ) as _response: - if 200 <= _response.status_code < 300: - for _chunk in _response.iter_bytes(): - yield _chunk - return - _response.read() try: + if 200 <= _response.status_code < 300: + for _chunk in _response.iter_bytes(): + yield _chunk + return + _response.read() _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -334,9 +334,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "filestorage/v1/files/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -423,13 +423,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.files.list() + + + async def main() -> None: + await 
client.filestorage.files.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/files", @@ -452,9 +460,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedFileList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedFileList, parse_obj_as(type_=PaginatedFileList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -491,6 +499,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.filestorage import FileRequest @@ -498,9 +508,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.files.create( - model=FileRequest(), - ) + + + async def main() -> None: + await client.filestorage.files.create( + model=FileRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/files", @@ -510,9 +526,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FileStorageFileResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FileStorageFileResponse, parse_obj_as(type_=FileStorageFileResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -549,15 +565,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.files.retrieve( - id="id", - ) + + + async def main() -> None: + await client.filestorage.files.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"filestorage/v1/files/{jsonable_encoder(id)}", @@ -565,9 +589,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(File, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(File, parse_obj_as(type_=File, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -600,16 +624,24 @@ async def download_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.files.download_retrieve( - id="string", - mime_type="string", - ) + + + async def main() -> None: + await client.filestorage.files.download_retrieve( + id="string", + mime_type="string", + ) + + + asyncio.run(main()) """ async with self._client_wrapper.httpx_client.stream( f"filestorage/v1/files/{jsonable_encoder(id)}/download", @@ -617,12 +649,12 @@ async def download_retrieve( params={"mime_type": mime_type}, request_options=request_options, ) as _response: - if 200 <= _response.status_code < 300: - async for _chunk in _response.aiter_bytes(): - yield _chunk - return - await 
_response.aread() try: + if 200 <= _response.status_code < 300: + async for _chunk in _response.aiter_bytes(): + yield _chunk + return + await _response.aread() _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -644,20 +676,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.files.meta_post_retrieve() + + + async def main() -> None: + await client.filestorage.files.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/files/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/folders/client.py b/src/merge/resources/filestorage/resources/folders/client.py index 8a82daa9..3cb3a60d 100644 --- a/src/merge/resources/filestorage/resources/folders/client.py +++ b/src/merge/resources/filestorage/resources/folders/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.file_storage_folder_response import FileStorageFolderResponse from ...types.folder import Folder @@ -126,9 +126,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedFolderList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedFolderList, parse_obj_as(type_=PaginatedFolderList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -184,9 +184,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FileStorageFolderResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FileStorageFolderResponse, parse_obj_as(type_=FileStorageFolderResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -239,9 +239,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Folder, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Folder, parse_obj_as(type_=Folder, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise 
ApiError(status_code=_response.status_code, body=_response.text) @@ -274,9 +274,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "filestorage/v1/folders/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -359,13 +359,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.folders.list() + + + async def main() -> None: + await client.filestorage.folders.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/folders", @@ -387,9 +395,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedFolderList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedFolderList, parse_obj_as(type_=PaginatedFolderList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -426,6 +434,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.filestorage import FolderRequest @@ -433,9 +443,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.folders.create( - model=FolderRequest(), - ) + + + async def main() -> None: + await client.filestorage.folders.create( + model=FolderRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/folders", @@ -445,9 +461,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FileStorageFolderResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FileStorageFolderResponse, parse_obj_as(type_=FileStorageFolderResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -484,15 +500,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.folders.retrieve( - id="id", - ) + + + async def main() -> None: + await client.filestorage.folders.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"filestorage/v1/folders/{jsonable_encoder(id)}", @@ -500,9 +524,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Folder, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return 
typing.cast(Folder, parse_obj_as(type_=Folder, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -524,20 +548,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.folders.meta_post_retrieve() + + + async def main() -> None: + await client.filestorage.folders.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/folders/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/force_resync/client.py b/src/merge/resources/filestorage/resources/force_resync/client.py index bfe442bb..b2cc6a05 100644 --- a/src/merge/resources/filestorage/resources/force_resync/client.py +++ b/src/merge/resources/filestorage/resources/force_resync/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.sync_status import SyncStatus @@ -43,9 +43,9 @@ def sync_status_resync_create( _response = self._client_wrapper.httpx_client.request( "filestorage/v1/sync-status/resync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[SyncStatus], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[SyncStatus], parse_obj_as(type_=typing.List[SyncStatus], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -74,20 +74,28 @@ async def sync_status_resync_create( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.force_resync.sync_status_resync_create() + + + async def main() -> None: + await client.filestorage.force_resync.sync_status_resync_create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/sync-status/resync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[SyncStatus], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[SyncStatus], parse_obj_as(type_=typing.List[SyncStatus], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git 
a/src/merge/resources/filestorage/resources/generate_key/client.py b/src/merge/resources/filestorage/resources/generate_key/client.py index 96b5339a..81429abc 100644 --- a/src/merge/resources/filestorage/resources/generate_key/client.py +++ b/src/merge/resources/filestorage/resources/generate_key/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.remote_key import RemoteKey @@ -53,9 +53,9 @@ def create(self, *, name: str, request_options: typing.Optional[RequestOptions] request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -85,15 +85,23 @@ async def create(self, *, name: str, request_options: typing.Optional[RequestOpt Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.generate_key.create( - name="Remote Deployment Key 1", - ) + + + async def main() -> None: + await client.filestorage.generate_key.create( + name="Remote Deployment Key 1", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/generate-key", @@ -102,9 +110,9 @@ async def create(self, *, name: str, request_options: typing.Optional[RequestOpt request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/groups/client.py b/src/merge/resources/filestorage/resources/groups/client.py index 92628afc..ebec2528 100644 --- a/src/merge/resources/filestorage/resources/groups/client.py +++ b/src/merge/resources/filestorage/resources/groups/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.group import Group from ...types.paginated_group_list import PaginatedGroupList @@ -98,9 +98,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedGroupList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedGroupList, parse_obj_as(type_=PaginatedGroupList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, 
body=_response.text) @@ -149,9 +149,9 @@ def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Group, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Group, parse_obj_as(type_=Group, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -218,13 +218,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.groups.list() + + + async def main() -> None: + await client.filestorage.groups.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/groups", @@ -242,9 +250,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedGroupList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedGroupList, parse_obj_as(type_=PaginatedGroupList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -277,15 +285,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.groups.retrieve( - id="id", - ) + + + async def main() -> None: + await client.filestorage.groups.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"filestorage/v1/groups/{jsonable_encoder(id)}", @@ -293,9 +309,9 @@ async def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Group, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Group, parse_obj_as(type_=Group, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/issues/client.py b/src/merge/resources/filestorage/resources/issues/client.py index 0d360248..5d76841f 100644 --- a/src/merge/resources/filestorage/resources/issues/client.py +++ b/src/merge/resources/filestorage/resources/issues/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.issue import Issue from ...types.paginated_issue_list import PaginatedIssueList @@ -127,9 +127,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedIssueList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedIssueList, parse_obj_as(type_=PaginatedIssueList, 
object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -166,9 +166,9 @@ def retrieve(self, id: str, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( f"filestorage/v1/issues/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Issue, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Issue, parse_obj_as(type_=Issue, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -251,13 +251,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.issues.list() + + + async def main() -> None: + await client.filestorage.issues.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/issues", @@ -287,9 +295,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedIssueList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedIssueList, parse_obj_as(type_=PaginatedIssueList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -313,22 +321,30 @@ async def retrieve(self, id: str, *, request_options: typing.Optional[RequestOpt Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.issues.retrieve( - id="id", - ) + + + async def main() -> None: + await client.filestorage.issues.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"filestorage/v1/issues/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Issue, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Issue, parse_obj_as(type_=Issue, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/link_token/client.py b/src/merge/resources/filestorage/resources/link_token/client.py index 4056dfc4..b30fb31f 100644 --- a/src/merge/resources/filestorage/resources/link_token/client.py +++ b/src/merge/resources/filestorage/resources/link_token/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.categories_enum import CategoriesEnum from ...types.common_model_scopes_body_request import CommonModelScopesBodyRequest @@ -119,9 +119,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= 
_response.status_code < 300: - return pydantic_v1.parse_obj_as(LinkToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(LinkToken, parse_obj_as(type_=LinkToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -198,6 +198,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.filestorage import CategoriesEnum @@ -205,12 +207,18 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.link_token.create( - end_user_email_address="example@gmail.com", - end_user_organization_name="Test Organization", - end_user_origin_id="12345", - categories=[CategoriesEnum.HRIS, CategoriesEnum.ATS], - ) + + + async def main() -> None: + await client.filestorage.link_token.create( + end_user_email_address="example@gmail.com", + end_user_organization_name="Test Organization", + end_user_origin_id="12345", + categories=[CategoriesEnum.HRIS, CategoriesEnum.ATS], + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/link-token", @@ -231,9 +239,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LinkToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(LinkToken, parse_obj_as(type_=LinkToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/linked_accounts/client.py b/src/merge/resources/filestorage/resources/linked_accounts/client.py index a5b33fbe..37089d25 100644 --- a/src/merge/resources/filestorage/resources/linked_accounts/client.py +++ b/src/merge/resources/filestorage/resources/linked_accounts/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_account_details_and_actions_list import PaginatedAccountDetailsAndActionsList from .types.linked_accounts_list_request_category import LinkedAccountsListRequestCategory @@ -122,9 +122,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountDetailsAndActionsList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountDetailsAndActionsList, parse_obj_as(type_=PaginatedAccountDetailsAndActionsList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -214,13 +214,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.linked_accounts.list() + + + async def main() -> None: + await client.filestorage.linked_accounts.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( 
"filestorage/v1/linked-accounts", @@ -242,9 +250,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountDetailsAndActionsList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountDetailsAndActionsList, parse_obj_as(type_=PaginatedAccountDetailsAndActionsList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/passthrough/client.py b/src/merge/resources/filestorage/resources/passthrough/client.py index 62ec9abf..27402f62 100644 --- a/src/merge/resources/filestorage/resources/passthrough/client.py +++ b/src/merge/resources/filestorage/resources/passthrough/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.data_passthrough_request import DataPassthroughRequest from ...types.remote_response import RemoteResponse @@ -55,9 +55,9 @@ def create( _response = self._client_wrapper.httpx_client.request( "filestorage/v1/passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -88,6 +88,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.filestorage import DataPassthroughRequest, MethodEnum @@ -95,19 +97,25 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.passthrough.create( - request=DataPassthroughRequest( - method=MethodEnum.GET, - path="/scooters", - ), - ) + + + async def main() -> None: + await client.filestorage.passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/regenerate_key/client.py b/src/merge/resources/filestorage/resources/regenerate_key/client.py index 4ecb9896..c49b6614 100644 --- a/src/merge/resources/filestorage/resources/regenerate_key/client.py +++ b/src/merge/resources/filestorage/resources/regenerate_key/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import 
AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.remote_key import RemoteKey @@ -53,9 +53,9 @@ def create(self, *, name: str, request_options: typing.Optional[RequestOptions] request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -85,15 +85,23 @@ async def create(self, *, name: str, request_options: typing.Optional[RequestOpt Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.regenerate_key.create( - name="Remote Deployment Key 1", - ) + + + async def main() -> None: + await client.filestorage.regenerate_key.create( + name="Remote Deployment Key 1", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/regenerate-key", @@ -102,9 +110,9 @@ async def create(self, *, name: str, request_options: typing.Optional[RequestOpt request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/scopes/client.py b/src/merge/resources/filestorage/resources/scopes/client.py index 595aa2fd..31261f7f 100644 --- a/src/merge/resources/filestorage/resources/scopes/client.py +++ b/src/merge/resources/filestorage/resources/scopes/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.common_model_scope_api import CommonModelScopeApi from ...types.individual_common_model_scope_deserializer_request import IndividualCommonModelScopeDeserializerRequest @@ -47,9 +47,9 @@ def default_scopes_retrieve( _response = self._client_wrapper.httpx_client.request( "filestorage/v1/default-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -84,9 +84,9 @@ def linked_account_scopes_retrieve( _response = self._client_wrapper.httpx_client.request( "filestorage/v1/linked-account-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -157,9 +157,9 @@ def linked_account_scopes_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -188,20 +188,28 @@ async def default_scopes_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.scopes.default_scopes_retrieve() + + + async def main() -> None: + await client.filestorage.scopes.default_scopes_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/default-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -225,20 +233,28 @@ async def linked_account_scopes_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.scopes.linked_account_scopes_retrieve() + + + async def main() -> None: + await client.filestorage.scopes.linked_account_scopes_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/linked-account-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -268,6 +284,8 @@ async def linked_account_scopes_create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.filestorage import ( IndividualCommonModelScopeDeserializerRequest, @@ -278,29 +296,35 @@ async def linked_account_scopes_create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.scopes.linked_account_scopes_create( - common_models=[ - IndividualCommonModelScopeDeserializerRequest( - model_name="Employee", - model_permissions={ - "READ": ModelPermissionDeserializerRequest( - is_enabled=True, - ), - "WRITE": ModelPermissionDeserializerRequest( - is_enabled=False, - ), - }, - ), - 
IndividualCommonModelScopeDeserializerRequest( - model_name="Benefit", - model_permissions={ - "WRITE": ModelPermissionDeserializerRequest( - is_enabled=False, - ) - }, - ), - ], - ) + + + async def main() -> None: + await client.filestorage.scopes.linked_account_scopes_create( + common_models=[ + IndividualCommonModelScopeDeserializerRequest( + model_name="Employee", + model_permissions={ + "READ": ModelPermissionDeserializerRequest( + is_enabled=True, + ), + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ), + }, + ), + IndividualCommonModelScopeDeserializerRequest( + model_name="Benefit", + model_permissions={ + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ) + }, + ), + ], + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/linked-account-scopes", @@ -309,9 +333,9 @@ async def linked_account_scopes_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/sync_status/client.py b/src/merge/resources/filestorage/resources/sync_status/client.py index 38260add..27a334be 100644 --- a/src/merge/resources/filestorage/resources/sync_status/client.py +++ b/src/merge/resources/filestorage/resources/sync_status/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_sync_status_list import PaginatedSyncStatusList @@ -56,9 +56,9 @@ def list( params={"cursor": cursor, "page_size": page_size}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedSyncStatusList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedSyncStatusList, parse_obj_as(type_=PaginatedSyncStatusList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -97,13 +97,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.sync_status.list() + + + async def main() -> None: + await client.filestorage.sync_status.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/sync-status", @@ -111,9 +119,9 @@ async def list( params={"cursor": cursor, "page_size": page_size}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedSyncStatusList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedSyncStatusList, parse_obj_as(type_=PaginatedSyncStatusList, object_=_response.json())) # type: ignore _response_json = _response.json() except 
JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/users/client.py b/src/merge/resources/filestorage/resources/users/client.py index 7aaa2879..ede93814 100644 --- a/src/merge/resources/filestorage/resources/users/client.py +++ b/src/merge/resources/filestorage/resources/users/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_user_list import PaginatedUserList from ...types.user import User @@ -103,9 +103,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedUserList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedUserList, parse_obj_as(type_=PaginatedUserList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -154,9 +154,9 @@ def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(User, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(User, parse_obj_as(type_=User, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -227,13 +227,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.users.list() + + + async def main() -> None: + await client.filestorage.users.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/users", @@ -252,9 +260,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedUserList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedUserList, parse_obj_as(type_=PaginatedUserList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -287,15 +295,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.users.retrieve( - id="id", - ) + + + async def main() -> None: + await client.filestorage.users.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"filestorage/v1/users/{jsonable_encoder(id)}", @@ -303,9 +319,9 @@ async def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(User, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(User, 
parse_obj_as(type_=User, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/resources/webhook_receivers/client.py b/src/merge/resources/filestorage/resources/webhook_receivers/client.py index 5531f72c..51ce3088 100644 --- a/src/merge/resources/filestorage/resources/webhook_receivers/client.py +++ b/src/merge/resources/filestorage/resources/webhook_receivers/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.webhook_receiver import WebhookReceiver @@ -44,9 +44,9 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty _response = self._client_wrapper.httpx_client.request( "filestorage/v1/webhook-receivers", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[WebhookReceiver], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[WebhookReceiver], parse_obj_as(type_=typing.List[WebhookReceiver], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -99,9 +99,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(WebhookReceiver, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(WebhookReceiver, parse_obj_as(type_=WebhookReceiver, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -128,20 +128,28 @@ async def list(self, *, request_options: typing.Optional[RequestOptions] = None) Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.webhook_receivers.list() + + + async def main() -> None: + await client.filestorage.webhook_receivers.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "filestorage/v1/webhook-receivers", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[WebhookReceiver], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[WebhookReceiver], parse_obj_as(type_=typing.List[WebhookReceiver], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -176,16 +184,24 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.filestorage.webhook_receivers.create( - event="event", - is_active=True, - ) + + + async def main() -> None: + await client.filestorage.webhook_receivers.create( + event="event", + is_active=True, + ) + + + asyncio.run(main()) """ _response = await 
self._client_wrapper.httpx_client.request( "filestorage/v1/webhook-receivers", @@ -194,9 +210,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(WebhookReceiver, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(WebhookReceiver, parse_obj_as(type_=WebhookReceiver, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/filestorage/types/account_details.py b/src/merge/resources/filestorage/types/account_details.py index 81f9587b..58cd348a 100644 --- a/src/merge/resources/filestorage/types/account_details.py +++ b/src/merge/resources/filestorage/types/account_details.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .category_enum import CategoryEnum -class AccountDetails(pydantic_v1.BaseModel): +class AccountDetails(UniversalBaseModel): id: typing.Optional[str] integration: typing.Optional[str] integration_slug: typing.Optional[str] @@ -18,27 +18,18 @@ class AccountDetails(pydantic_v1.BaseModel): end_user_email_address: typing.Optional[str] status: typing.Optional[str] webhook_listener_url: typing.Optional[str] - is_duplicate: typing.Optional[bool] = pydantic_v1.Field() + is_duplicate: typing.Optional[bool] = pydantic.Field() """ Whether a Production Linked Account's credentials match another existing Production Linked Account. This field is `null` for Test Linked Accounts, incomplete Production Linked Accounts, and ignored duplicate Production Linked Account sets. """ account_type: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/account_details_and_actions.py b/src/merge/resources/filestorage/types/account_details_and_actions.py index 6a1b2874..ffc38f58 100644 --- a/src/merge/resources/filestorage/types/account_details_and_actions.py +++ b/src/merge/resources/filestorage/types/account_details_and_actions.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. 
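The client hunks above all make the same change to response handling: the 2xx check and the parse now both sit inside the try block, and pydantic_v1.parse_obj_as(Model, data) becomes typing.cast(Model, parse_obj_as(type_=Model, object_=data)), so a successful status with an undecodable body surfaces as ApiError instead of an uncaught JSONDecodeError. A minimal, self-contained sketch of that control flow follows; Ping, FakeResponse, handle and the local ApiError are illustrative stand-ins rather than SDK classes, and plain pydantic construction stands in for the SDK's parse helper.

import typing
from json import JSONDecodeError, loads

import pydantic


class Ping(pydantic.BaseModel):
    ok: bool


class ApiError(Exception):
    # Local stand-in for merge.core.api_error.ApiError.
    def __init__(self, *, status_code: int, body: typing.Any) -> None:
        super().__init__(f"status_code: {status_code}, body: {body}")
        self.status_code = status_code
        self.body = body


class FakeResponse:
    # Stand-in for the httpx response object the generated clients receive.
    def __init__(self, status_code: int, text: str) -> None:
        self.status_code = status_code
        self.text = text

    def json(self) -> typing.Any:
        return loads(self.text)


def handle(response: FakeResponse) -> Ping:
    try:
        if 200 <= response.status_code < 300:
            # The generated clients call parse_obj_as(type_=..., object_=...) here;
            # plain pydantic construction stands in for that helper in this sketch.
            return typing.cast(Ping, Ping(**response.json()))
        response_json = response.json()
    except JSONDecodeError:
        raise ApiError(status_code=response.status_code, body=response.text)
    raise ApiError(status_code=response.status_code, body=response_json)


print(handle(FakeResponse(200, '{"ok": true}')))  # Ping(ok=True)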
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_details_and_actions_integration import AccountDetailsAndActionsIntegration from .account_details_and_actions_status_enum import AccountDetailsAndActionsStatusEnum from .category_enum import CategoryEnum -class AccountDetailsAndActions(pydantic_v1.BaseModel): +class AccountDetailsAndActions(UniversalBaseModel): """ # The LinkedAccount Object @@ -30,13 +30,13 @@ class AccountDetailsAndActions(pydantic_v1.BaseModel): end_user_origin_id: typing.Optional[str] end_user_organization_name: str end_user_email_address: str - subdomain: typing.Optional[str] = pydantic_v1.Field() + subdomain: typing.Optional[str] = pydantic.Field() """ The tenant or domain the customer has provided access to. """ webhook_listener_url: str - is_duplicate: typing.Optional[bool] = pydantic_v1.Field() + is_duplicate: typing.Optional[bool] = pydantic.Field() """ Whether a Production Linked Account's credentials match another existing Production Linked Account. This field is `null` for Test Linked Accounts, incomplete Production Linked Accounts, and ignored duplicate Production Linked Account sets. """ @@ -44,20 +44,11 @@ class AccountDetailsAndActions(pydantic_v1.BaseModel): integration: typing.Optional[AccountDetailsAndActionsIntegration] account_type: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/account_details_and_actions_integration.py b/src/merge/resources/filestorage/types/account_details_and_actions_integration.py index 7c300a2b..8aa63726 100644 --- a/src/merge/resources/filestorage/types/account_details_and_actions_integration.py +++ b/src/merge/resources/filestorage/types/account_details_and_actions_integration.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. 
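The regenerated async docstring examples above no longer show a bare module-level await, which is not valid Python outside an async REPL; they wrap the call in a coroutine and drive it with asyncio.run. A usage sketch in the same shape, with placeholder credentials and the result printed for visibility:

import asyncio

from merge.client import AsyncMerge

client = AsyncMerge(
    account_token="YOUR_ACCOUNT_TOKEN",
    api_key="YOUR_API_KEY",
)


async def main() -> None:
    # Any of the awaited calls from the examples above works here.
    paginated_files = await client.filestorage.files.list()
    print(paginated_files)


asyncio.run(main())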
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .categories_enum import CategoriesEnum from .model_operation import ModelOperation -class AccountDetailsAndActionsIntegration(pydantic_v1.BaseModel): +class AccountDetailsAndActionsIntegration(UniversalBaseModel): name: str categories: typing.List[CategoriesEnum] image: typing.Optional[str] @@ -19,20 +19,11 @@ class AccountDetailsAndActionsIntegration(pydantic_v1.BaseModel): passthrough_available: bool available_model_operations: typing.Optional[typing.List[ModelOperation]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/account_integration.py b/src/merge/resources/filestorage/types/account_integration.py index 57006e35..7a8a27ab 100644 --- a/src/merge/resources/filestorage/types/account_integration.py +++ b/src/merge/resources/filestorage/types/account_integration.py @@ -1,69 +1,60 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .categories_enum import CategoriesEnum -class AccountIntegration(pydantic_v1.BaseModel): - name: str = pydantic_v1.Field() +class AccountIntegration(UniversalBaseModel): + name: str = pydantic.Field() """ Company name. """ - categories: typing.Optional[typing.List[CategoriesEnum]] = pydantic_v1.Field() + categories: typing.Optional[typing.List[CategoriesEnum]] = pydantic.Field() """ Category or categories this integration belongs to. Multiple categories should be comma separated, i.e. [ats, hris]. """ - image: typing.Optional[str] = pydantic_v1.Field() + image: typing.Optional[str] = pydantic.Field() """ Company logo in rectangular shape. Upload an image with a clear background. """ - square_image: typing.Optional[str] = pydantic_v1.Field() + square_image: typing.Optional[str] = pydantic.Field() """ Company logo in square shape. Upload an image with a white background. """ - color: typing.Optional[str] = pydantic_v1.Field() + color: typing.Optional[str] = pydantic.Field() """ The color of this integration used for buttons and text throughout the app and landing pages. Choose a darker, saturated color. 
""" slug: typing.Optional[str] - api_endpoints_to_documentation_urls: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + api_endpoints_to_documentation_urls: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ Mapping of API endpoints to documentation urls for support. Example: {'GET': [['/common-model-scopes', 'https://docs.merge.dev/accounting/common-model-scopes/#common_model_scopes_retrieve'],['/common-model-actions', 'https://docs.merge.dev/accounting/common-model-actions/#common_model_actions_retrieve']], 'POST': []} """ - webhook_setup_guide_url: typing.Optional[str] = pydantic_v1.Field() + webhook_setup_guide_url: typing.Optional[str] = pydantic.Field() """ Setup guide URL for third party webhook creation. Exposed in Merge Docs. """ - category_beta_status: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + category_beta_status: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ Category or categories this integration is in beta status for. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/account_token.py b/src/merge/resources/filestorage/types/account_token.py index 4794fea3..c280c7cb 100644 --- a/src/merge/resources/filestorage/types/account_token.py +++ b/src/merge/resources/filestorage/types/account_token.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_integration import AccountIntegration -class AccountToken(pydantic_v1.BaseModel): +class AccountToken(UniversalBaseModel): account_token: str integration: AccountIntegration - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/advanced_metadata.py b/src/merge/resources/filestorage/types/advanced_metadata.py index 2aa7d1d6..5b0ec9e2 100644 --- a/src/merge/resources/filestorage/types/advanced_metadata.py +++ b/src/merge/resources/filestorage/types/advanced_metadata.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class AdvancedMetadata(pydantic_v1.BaseModel): + +class AdvancedMetadata(UniversalBaseModel): id: str display_name: typing.Optional[str] description: typing.Optional[str] @@ -15,20 +15,11 @@ class AdvancedMetadata(pydantic_v1.BaseModel): is_custom: typing.Optional[bool] field_choices: typing.Optional[typing.List[typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/async_passthrough_reciept.py b/src/merge/resources/filestorage/types/async_passthrough_reciept.py index 2cc33210..f2144443 100644 --- a/src/merge/resources/filestorage/types/async_passthrough_reciept.py +++ b/src/merge/resources/filestorage/types/async_passthrough_reciept.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
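# --- Illustrative sketch (editor's aside, not part of the generated patch) ---
# Every dict() override deleted above did the same thing: dump the model twice
# (once with exclude_unset, once with exclude_none, both by alias) and deep-merge
# the results via deep_union_pydantic_dicts. That helper is not shown in this
# section; the recursive merge below is an assumption about its behaviour, and
# Sample is a plain-pydantic stand-in used only to show the dump calls.
import typing

import pydantic


def deep_union(lhs: typing.Dict[str, typing.Any], rhs: typing.Dict[str, typing.Any]) -> typing.Dict[str, typing.Any]:
    # Hypothetical stand-in for deep_union_pydantic_dicts: right side wins,
    # nested dicts are merged recursively.
    merged = dict(lhs)
    for key, value in rhs.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = deep_union(merged[key], value)
        else:
            merged[key] = value
    return merged


class Sample(pydantic.BaseModel):
    id: typing.Optional[str] = None
    name: str = "example"


sample = Sample(id=None)
old_style = deep_union(
    sample.dict(by_alias=True, exclude_unset=True),
    sample.dict(by_alias=True, exclude_none=True),
)
print(old_style)  # {'id': None, 'name': 'example'} under the old merge semantics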
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class AsyncPassthroughReciept(pydantic_v1.BaseModel): - async_passthrough_receipt_id: str - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class AsyncPassthroughReciept(UniversalBaseModel): + async_passthrough_receipt_id: str - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/audit_log_event.py b/src/merge/resources/filestorage/types/audit_log_event.py index 43be24a5..c0b4e43a 100644 --- a/src/merge/resources/filestorage/types/audit_log_event.py +++ b/src/merge/resources/filestorage/types/audit_log_event.py @@ -3,25 +3,26 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .audit_log_event_event_type import AuditLogEventEventType from .audit_log_event_role import AuditLogEventRole -class AuditLogEvent(pydantic_v1.BaseModel): +class AuditLogEvent(UniversalBaseModel): id: typing.Optional[str] - user_name: typing.Optional[str] = pydantic_v1.Field() + user_name: typing.Optional[str] = pydantic.Field() """ The User's full name at the time of this Event occurring. """ - user_email: typing.Optional[str] = pydantic_v1.Field() + user_email: typing.Optional[str] = pydantic.Field() """ The User's email at the time of this Event occurring. """ - role: AuditLogEventRole = pydantic_v1.Field() + role: AuditLogEventRole = pydantic.Field() """ Designates the role of the user (or SYSTEM/API if action not taken by a user) at the time of this Event occurring. @@ -34,7 +35,7 @@ class AuditLogEvent(pydantic_v1.BaseModel): """ ip_address: str - event_type: AuditLogEventEventType = pydantic_v1.Field() + event_type: AuditLogEventEventType = pydantic.Field() """ Designates the type of event that occurred. 
@@ -80,20 +81,11 @@ class AuditLogEvent(pydantic_v1.BaseModel): event_description: str created_at: typing.Optional[dt.datetime] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/available_actions.py b/src/merge/resources/filestorage/types/available_actions.py index bbd94581..1f1d424c 100644 --- a/src/merge/resources/filestorage/types/available_actions.py +++ b/src/merge/resources/filestorage/types/available_actions.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_integration import AccountIntegration from .model_operation import ModelOperation -class AvailableActions(pydantic_v1.BaseModel): +class AvailableActions(UniversalBaseModel): """ # The AvailableActions Object @@ -26,20 +26,11 @@ class AvailableActions(pydantic_v1.BaseModel): passthrough_available: bool available_model_operations: typing.Optional[typing.List[ModelOperation]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/common_model_scope_api.py b/src/merge/resources/filestorage/types/common_model_scope_api.py index d6fdec56..093001ba 100644 --- a/src/merge/resources/filestorage/types/common_model_scope_api.py +++ b/src/merge/resources/filestorage/types/common_model_scope_api.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
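# --- Illustrative sketch (editor's aside, not part of the generated patch) ---
# The removed Config blocks also dropped json_encoders = {dt.datetime: serialize_datetime},
# yet models such as AuditLogEvent above keep dt.datetime fields (created_at).
# The patch implies datetime encoding now lives centrally (UniversalBaseModel /
# the core serializers) rather than in per-model config; that is an inference,
# not something shown in these hunks. Plain-pydantic illustration of the output:
import datetime as dt
import typing

import pydantic


class Event(pydantic.BaseModel):  # stand-in for the generated models, not the SDK class
    created_at: typing.Optional[dt.datetime] = None


event = Event(created_at=dt.datetime(2024, 7, 24, 11, 46, 4, tzinfo=dt.timezone.utc))
# .json() on Pydantic v1 (deprecated alias of model_dump_json() on v2) emits ISO 8601;
# exact offset formatting varies slightly between majors.
print(event.json())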
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .individual_common_model_scope_deserializer import IndividualCommonModelScopeDeserializer -class CommonModelScopeApi(pydantic_v1.BaseModel): - common_models: typing.List[IndividualCommonModelScopeDeserializer] = pydantic_v1.Field() +class CommonModelScopeApi(UniversalBaseModel): + common_models: typing.List[IndividualCommonModelScopeDeserializer] = pydantic.Field() """ The common models you want to update the scopes for """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/common_model_scopes_body_request.py b/src/merge/resources/filestorage/types/common_model_scopes_body_request.py index e956cbdc..9098f031 100644 --- a/src/merge/resources/filestorage/types/common_model_scopes_body_request.py +++ b/src/merge/resources/filestorage/types/common_model_scopes_body_request.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .enabled_actions_enum import EnabledActionsEnum -class CommonModelScopesBodyRequest(pydantic_v1.BaseModel): +class CommonModelScopesBodyRequest(UniversalBaseModel): model_id: str enabled_actions: typing.List[EnabledActionsEnum] disabled_fields: typing.List[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/data_passthrough_request.py b/src/merge/resources/filestorage/types/data_passthrough_request.py index fdf9b18c..3f9ee090 100644 --- a/src/merge/resources/filestorage/types/data_passthrough_request.py +++ b/src/merge/resources/filestorage/types/data_passthrough_request.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .method_enum import MethodEnum from .multipart_form_field_request import MultipartFormFieldRequest from .request_format_enum import RequestFormatEnum -class DataPassthroughRequest(pydantic_v1.BaseModel): +class DataPassthroughRequest(UniversalBaseModel): """ # The DataPassthrough Object @@ -24,51 +24,42 @@ class DataPassthroughRequest(pydantic_v1.BaseModel): """ method: MethodEnum - path: str = pydantic_v1.Field() + path: str = pydantic.Field() """ The path of the request in the third party's platform. """ - base_url_override: typing.Optional[str] = pydantic_v1.Field() + base_url_override: typing.Optional[str] = pydantic.Field() """ An optional override of the third party's base url for the request. """ - data: typing.Optional[str] = pydantic_v1.Field() + data: typing.Optional[str] = pydantic.Field() """ The data with the request. You must include a `request_format` parameter matching the data's format """ - multipart_form_data: typing.Optional[typing.List[MultipartFormFieldRequest]] = pydantic_v1.Field() + multipart_form_data: typing.Optional[typing.List[MultipartFormFieldRequest]] = pydantic.Field() """ Pass an array of `MultipartFormField` objects in here instead of using the `data` param if `request_format` is set to `MULTIPART`. 
""" - headers: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + headers: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ The headers to use for the request (Merge will handle the account's authorization headers). `Content-Type` header is required for passthrough. Choose content type corresponding to expected format of receiving server. """ request_format: typing.Optional[RequestFormatEnum] - normalize_response: typing.Optional[bool] = pydantic_v1.Field() + normalize_response: typing.Optional[bool] = pydantic.Field() """ Optional. If true, the response will always be an object of the form `{"type": T, "value": ...}` where `T` will be one of `string, boolean, number, null, array, object`. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/debug_mode_log.py b/src/merge/resources/filestorage/types/debug_mode_log.py index 321c9090..8edea2ae 100644 --- a/src/merge/resources/filestorage/types/debug_mode_log.py +++ b/src/merge/resources/filestorage/types/debug_mode_log.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_model_log_summary import DebugModelLogSummary -class DebugModeLog(pydantic_v1.BaseModel): +class DebugModeLog(UniversalBaseModel): log_id: str dashboard_view: str log_summary: DebugModelLogSummary - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/debug_model_log_summary.py b/src/merge/resources/filestorage/types/debug_model_log_summary.py index 06bb154b..4f5b07db 100644 --- a/src/merge/resources/filestorage/types/debug_model_log_summary.py +++ b/src/merge/resources/filestorage/types/debug_model_log_summary.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class DebugModelLogSummary(pydantic_v1.BaseModel): + +class DebugModelLogSummary(UniversalBaseModel): url: str method: str status_code: int - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/drive.py b/src/merge/resources/filestorage/types/drive.py index 2b50659c..01f2f280 100644 --- a/src/merge/resources/filestorage/types/drive.py +++ b/src/merge/resources/filestorage/types/drive.py @@ -3,11 +3,12 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class Drive(pydantic_v1.BaseModel): + +class Drive(UniversalBaseModel): """ # The Drive Object @@ -21,37 +22,37 @@ class Drive(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The drive's name. """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's drive was created. """ - drive_url: typing.Optional[str] = pydantic_v1.Field() + drive_url: typing.Optional[str] = pydantic.Field() """ The drive's url. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -59,20 +60,11 @@ class Drive(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[typing.Optional[typing.Dict[str, typing.Any]]]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/error_validation_problem.py b/src/merge/resources/filestorage/types/error_validation_problem.py index 425af45c..3838491d 100644 --- a/src/merge/resources/filestorage/types/error_validation_problem.py +++ b/src/merge/resources/filestorage/types/error_validation_problem.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .validation_problem_source import ValidationProblemSource -class ErrorValidationProblem(pydantic_v1.BaseModel): +class ErrorValidationProblem(UniversalBaseModel): source: typing.Optional[ValidationProblemSource] title: str detail: str problem_type: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/external_target_field_api.py b/src/merge/resources/filestorage/types/external_target_field_api.py index a97d536a..8a971c64 100644 --- a/src/merge/resources/filestorage/types/external_target_field_api.py +++ b/src/merge/resources/filestorage/types/external_target_field_api.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ExternalTargetFieldApi(pydantic_v1.BaseModel): + +class ExternalTargetFieldApi(UniversalBaseModel): name: typing.Optional[str] description: typing.Optional[str] is_mapped: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/external_target_field_api_response.py b/src/merge/resources/filestorage/types/external_target_field_api_response.py index bde996de..3d81dc30 100644 --- a/src/merge/resources/filestorage/types/external_target_field_api_response.py +++ b/src/merge/resources/filestorage/types/external_target_field_api_response.py @@ -1,36 +1,25 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .external_target_field_api import ExternalTargetFieldApi - +import pydantic -class ExternalTargetFieldApiResponse(pydantic_v1.BaseModel): - file: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="File") - folder: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Folder") - drive: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Drive") - group: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Group") - user: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="User") +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .external_target_field_api import ExternalTargetFieldApi - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class ExternalTargetFieldApiResponse(UniversalBaseModel): + file: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="File") + folder: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Folder") + drive: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Drive") + group: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Group") + user: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="User") - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/field_mapping_api_instance.py b/src/merge/resources/filestorage/types/field_mapping_api_instance.py index d9d7670d..8af85a52 100644 --- a/src/merge/resources/filestorage/types/field_mapping_api_instance.py +++ b/src/merge/resources/filestorage/types/field_mapping_api_instance.py @@ -1,34 +1,25 @@ # This file was auto-generated by Fern from our API Definition. 
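# --- Illustrative sketch (editor's aside, not part of the generated patch) ---
# ExternalTargetFieldApiResponse above keeps its capitalised wire names via
# pydantic.Field(alias="File"), alias="Folder", ..., while the replacement configs
# shown here no longer set allow_population_by_field_name / populate_by_name.
# With aliases but no populate-by-name flag, plain pydantic accepts the aliased key
# on input and emits it on by_alias output, as sketched below; whether
# UniversalBaseModel re-enables field-name population is not visible in this section.
import typing

import pydantic


class AliasedResponse(pydantic.BaseModel):  # simplified stand-in, not the SDK class
    file: typing.Optional[typing.List[str]] = pydantic.Field(default=None, alias="File")


parsed = AliasedResponse(**{"File": ["doc.txt"]})  # input keyed by the alias, matching the wire format
print(parsed.file)                 # ['doc.txt']
print(parsed.dict(by_alias=True))  # {'File': ['doc.txt']}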
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_mapping_api_instance_remote_field import FieldMappingApiInstanceRemoteField from .field_mapping_api_instance_target_field import FieldMappingApiInstanceTargetField -class FieldMappingApiInstance(pydantic_v1.BaseModel): +class FieldMappingApiInstance(UniversalBaseModel): id: typing.Optional[str] is_integration_wide: typing.Optional[bool] target_field: typing.Optional[FieldMappingApiInstanceTargetField] remote_field: typing.Optional[FieldMappingApiInstanceRemoteField] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/field_mapping_api_instance_remote_field.py b/src/merge/resources/filestorage/types/field_mapping_api_instance_remote_field.py index 9539d3dc..0635b76f 100644 --- a/src/merge/resources/filestorage/types/field_mapping_api_instance_remote_field.py +++ b/src/merge/resources/filestorage/types/field_mapping_api_instance_remote_field.py @@ -1,36 +1,25 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_mapping_api_instance_remote_field_remote_endpoint_info import ( FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo, ) -class FieldMappingApiInstanceRemoteField(pydantic_v1.BaseModel): +class FieldMappingApiInstanceRemoteField(UniversalBaseModel): remote_key_name: str - schema_: typing.Dict[str, typing.Any] = pydantic_v1.Field(alias="schema") + schema_: typing.Dict[str, typing.Any] = pydantic.Field(alias="schema") remote_endpoint_info: FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py b/src/merge/resources/filestorage/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py index d9fcc276..e34eb6e4 100644 --- a/src/merge/resources/filestorage/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py +++ b/src/merge/resources/filestorage/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo(pydantic_v1.BaseModel): + +class FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo(UniversalBaseModel): method: typing.Optional[str] url_path: typing.Optional[str] field_traversal_path: typing.Optional[typing.List[str]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/field_mapping_api_instance_response.py b/src/merge/resources/filestorage/types/field_mapping_api_instance_response.py index 033aa66f..87840098 100644 --- a/src/merge/resources/filestorage/types/field_mapping_api_instance_response.py +++ b/src/merge/resources/filestorage/types/field_mapping_api_instance_response.py @@ -1,36 +1,25 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .field_mapping_api_instance import FieldMappingApiInstance - +import pydantic -class FieldMappingApiInstanceResponse(pydantic_v1.BaseModel): - file: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="File") - folder: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Folder") - drive: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Drive") - group: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Group") - user: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="User") +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .field_mapping_api_instance import FieldMappingApiInstance - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class FieldMappingApiInstanceResponse(UniversalBaseModel): + file: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="File") + folder: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Folder") + drive: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Drive") + group: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Group") + user: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="User") - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/field_mapping_api_instance_target_field.py b/src/merge/resources/filestorage/types/field_mapping_api_instance_target_field.py index 25a8dcff..c590d4ce 100644 --- a/src/merge/resources/filestorage/types/field_mapping_api_instance_target_field.py +++ b/src/merge/resources/filestorage/types/field_mapping_api_instance_target_field.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldMappingApiInstanceTargetField(pydantic_v1.BaseModel): + +class FieldMappingApiInstanceTargetField(UniversalBaseModel): name: str description: str is_organization_wide: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/field_mapping_instance_response.py b/src/merge/resources/filestorage/types/field_mapping_instance_response.py index b55d2c40..aaf06f0e 100644 --- a/src/merge/resources/filestorage/types/field_mapping_instance_response.py +++ b/src/merge/resources/filestorage/types/field_mapping_instance_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .field_mapping_api_instance import FieldMappingApiInstance from .warning_validation_problem import WarningValidationProblem -class FieldMappingInstanceResponse(pydantic_v1.BaseModel): +class FieldMappingInstanceResponse(UniversalBaseModel): model: FieldMappingApiInstance warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/field_permission_deserializer.py b/src/merge/resources/filestorage/types/field_permission_deserializer.py index 124f3deb..ed80b9d6 100644 --- a/src/merge/resources/filestorage/types/field_permission_deserializer.py +++ b/src/merge/resources/filestorage/types/field_permission_deserializer.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldPermissionDeserializer(pydantic_v1.BaseModel): + +class FieldPermissionDeserializer(UniversalBaseModel): enabled: typing.Optional[typing.List[typing.Any]] disabled: typing.Optional[typing.List[typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/field_permission_deserializer_request.py b/src/merge/resources/filestorage/types/field_permission_deserializer_request.py index 65e80e75..e937e743 100644 --- a/src/merge/resources/filestorage/types/field_permission_deserializer_request.py +++ b/src/merge/resources/filestorage/types/field_permission_deserializer_request.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldPermissionDeserializerRequest(pydantic_v1.BaseModel): + +class FieldPermissionDeserializerRequest(UniversalBaseModel): enabled: typing.Optional[typing.List[typing.Any]] disabled: typing.Optional[typing.List[typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/file.py b/src/merge/resources/filestorage/types/file.py index 527ef52b..bad47a4c 100644 --- a/src/merge/resources/filestorage/types/file.py +++ b/src/merge/resources/filestorage/types/file.py @@ -3,14 +3,15 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .file_drive import FileDrive from .file_folder import FileFolder from .file_permissions import FilePermissions -class File(pydantic_v1.BaseModel): +class File(UniversalBaseModel): """ # The File Object @@ -24,77 +25,77 @@ class File(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The file's name. """ - file_url: typing.Optional[str] = pydantic_v1.Field() + file_url: typing.Optional[str] = pydantic.Field() """ The URL to access the file. """ - file_thumbnail_url: typing.Optional[str] = pydantic_v1.Field() + file_thumbnail_url: typing.Optional[str] = pydantic.Field() """ The URL that produces a thumbnail preview of the file. Typically an image. """ - size: typing.Optional[int] = pydantic_v1.Field() + size: typing.Optional[int] = pydantic.Field() """ The file's size, in bytes. 
""" - mime_type: typing.Optional[str] = pydantic_v1.Field() + mime_type: typing.Optional[str] = pydantic.Field() """ The file's mime type. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The file's description. """ - folder: typing.Optional[FileFolder] = pydantic_v1.Field() + folder: typing.Optional[FileFolder] = pydantic.Field() """ The folder that the file belongs to. """ - permissions: typing.Optional[FilePermissions] = pydantic_v1.Field() + permissions: typing.Optional[FilePermissions] = pydantic.Field() """ The Permission object is used to represent a user's or group's access to a File or Folder. Permissions are unexpanded by default. Use the query param `expand=permissions` to see more details under `GET /files`. """ - drive: typing.Optional[FileDrive] = pydantic_v1.Field() + drive: typing.Optional[FileDrive] = pydantic.Field() """ The drive that the file belongs to. """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's file was created. """ - remote_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_updated_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's file was updated. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -102,20 +103,11 @@ class File(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[typing.Optional[typing.Dict[str, typing.Any]]]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/file_request.py b/src/merge/resources/filestorage/types/file_request.py index bc462830..f16d03b2 100644 --- a/src/merge/resources/filestorage/types/file_request.py +++ b/src/merge/resources/filestorage/types/file_request.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .file_request_drive import FileRequestDrive from .file_request_folder import FileRequestFolder from .file_request_permissions import FileRequestPermissions -class FileRequest(pydantic_v1.BaseModel): +class FileRequest(UniversalBaseModel): """ # The File Object @@ -23,47 +23,47 @@ class FileRequest(pydantic_v1.BaseModel): Fetch from the `GET /api/filestorage/v1/files` endpoint and view their files. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The file's name. """ - file_url: typing.Optional[str] = pydantic_v1.Field() + file_url: typing.Optional[str] = pydantic.Field() """ The URL to access the file. """ - file_thumbnail_url: typing.Optional[str] = pydantic_v1.Field() + file_thumbnail_url: typing.Optional[str] = pydantic.Field() """ The URL that produces a thumbnail preview of the file. Typically an image. """ - size: typing.Optional[int] = pydantic_v1.Field() + size: typing.Optional[int] = pydantic.Field() """ The file's size, in bytes. """ - mime_type: typing.Optional[str] = pydantic_v1.Field() + mime_type: typing.Optional[str] = pydantic.Field() """ The file's mime type. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The file's description. """ - folder: typing.Optional[FileRequestFolder] = pydantic_v1.Field() + folder: typing.Optional[FileRequestFolder] = pydantic.Field() """ The folder that the file belongs to. """ - permissions: typing.Optional[FileRequestPermissions] = pydantic_v1.Field() + permissions: typing.Optional[FileRequestPermissions] = pydantic.Field() """ The Permission object is used to represent a user's or group's access to a File or Folder. Permissions are unexpanded by default. Use the query param `expand=permissions` to see more details under `GET /files`. """ - drive: typing.Optional[FileRequestDrive] = pydantic_v1.Field() + drive: typing.Optional[FileRequestDrive] = pydantic.Field() """ The drive that the file belongs to. 
""" @@ -71,20 +71,11 @@ class FileRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/file_storage_file_response.py b/src/merge/resources/filestorage/types/file_storage_file_response.py index fe5dbf60..fd9bd632 100644 --- a/src/merge/resources/filestorage/types/file_storage_file_response.py +++ b/src/merge/resources/filestorage/types/file_storage_file_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .file import File from .warning_validation_problem import WarningValidationProblem -class FileStorageFileResponse(pydantic_v1.BaseModel): +class FileStorageFileResponse(UniversalBaseModel): model: File warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/file_storage_folder_response.py b/src/merge/resources/filestorage/types/file_storage_folder_response.py index 07707cb4..8302ff3d 100644 --- 
a/src/merge/resources/filestorage/types/file_storage_folder_response.py +++ b/src/merge/resources/filestorage/types/file_storage_folder_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .folder import Folder from .warning_validation_problem import WarningValidationProblem -class FileStorageFolderResponse(pydantic_v1.BaseModel): +class FileStorageFolderResponse(UniversalBaseModel): model: Folder warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/folder.py b/src/merge/resources/filestorage/types/folder.py index 789a08e1..c5c806f8 100644 --- a/src/merge/resources/filestorage/types/folder.py +++ b/src/merge/resources/filestorage/types/folder.py @@ -5,13 +5,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel, update_forward_refs from .folder_drive import FolderDrive from .folder_permissions import FolderPermissions -class Folder(pydantic_v1.BaseModel): +class Folder(UniversalBaseModel): """ # The Folder Object @@ -25,67 +26,67 @@ class Folder(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The folder's name. 
""" - folder_url: typing.Optional[str] = pydantic_v1.Field() + folder_url: typing.Optional[str] = pydantic.Field() """ The URL to access the folder. """ - size: typing.Optional[int] = pydantic_v1.Field() + size: typing.Optional[int] = pydantic.Field() """ The folder's size, in bytes. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The folder's description. """ - parent_folder: typing.Optional[FolderParentFolder] = pydantic_v1.Field() + parent_folder: typing.Optional[FolderParentFolder] = pydantic.Field() """ The folder that the folder belongs to. """ - drive: typing.Optional[FolderDrive] = pydantic_v1.Field() + drive: typing.Optional[FolderDrive] = pydantic.Field() """ The drive that the folder belongs to. """ - permissions: typing.Optional[FolderPermissions] = pydantic_v1.Field() + permissions: typing.Optional[FolderPermissions] = pydantic.Field() """ The Permission object is used to represent a user's or group's access to a File or Folder. Permissions are unexpanded by default. Use the query param `expand=permissions` to see more details under `GET /folders`. """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's folder was created. """ - remote_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_updated_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's folder was updated. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -93,25 +94,16 @@ class Folder(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[typing.Optional[typing.Dict[str, typing.Any]]]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow from .folder_parent_folder import FolderParentFolder # noqa: E402 -Folder.update_forward_refs() +update_forward_refs(Folder) diff --git a/src/merge/resources/filestorage/types/folder_request.py b/src/merge/resources/filestorage/types/folder_request.py index e4472dea..6f66e17e 100644 --- a/src/merge/resources/filestorage/types/folder_request.py +++ b/src/merge/resources/filestorage/types/folder_request.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .folder_request_drive import FolderRequestDrive from .folder_request_parent_folder import FolderRequestParentFolder from .folder_request_permissions import FolderRequestPermissions -class FolderRequest(pydantic_v1.BaseModel): +class FolderRequest(UniversalBaseModel): """ # The Folder Object @@ -23,37 +23,37 @@ class FolderRequest(pydantic_v1.BaseModel): Fetch from the `GET /api/filestorage/v1/folders` endpoint and view their folders. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The folder's name. """ - folder_url: typing.Optional[str] = pydantic_v1.Field() + folder_url: typing.Optional[str] = pydantic.Field() """ The URL to access the folder. """ - size: typing.Optional[int] = pydantic_v1.Field() + size: typing.Optional[int] = pydantic.Field() """ The folder's size, in bytes. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The folder's description. """ - parent_folder: typing.Optional[FolderRequestParentFolder] = pydantic_v1.Field() + parent_folder: typing.Optional[FolderRequestParentFolder] = pydantic.Field() """ The folder that the folder belongs to. """ - drive: typing.Optional[FolderRequestDrive] = pydantic_v1.Field() + drive: typing.Optional[FolderRequestDrive] = pydantic.Field() """ The drive that the folder belongs to. """ - permissions: typing.Optional[FolderRequestPermissions] = pydantic_v1.Field() + permissions: typing.Optional[FolderRequestPermissions] = pydantic.Field() """ The Permission object is used to represent a user's or group's access to a File or Folder. Permissions are unexpanded by default. Use the query param `expand=permissions` to see more details under `GET /folders`. 
""" @@ -61,20 +61,11 @@ class FolderRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/group.py b/src/merge/resources/filestorage/types/group.py index bafebf3f..c69f315e 100644 --- a/src/merge/resources/filestorage/types/group.py +++ b/src/merge/resources/filestorage/types/group.py @@ -3,11 +3,12 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class Group(pydantic_v1.BaseModel): + +class Group(UniversalBaseModel): """ # The Group Object @@ -21,32 +22,32 @@ class Group(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The group's name. """ - users: typing.List[str] = pydantic_v1.Field() + users: typing.List[str] = pydantic.Field() """ The users that belong in the group. If null, this typically means it's either a domain or the third-party platform does not surface this information. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -54,20 +55,11 @@ class Group(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[typing.Optional[typing.Dict[str, typing.Any]]]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/individual_common_model_scope_deserializer.py b/src/merge/resources/filestorage/types/individual_common_model_scope_deserializer.py index d80ca06e..ffa55055 100644 --- a/src/merge/resources/filestorage/types/individual_common_model_scope_deserializer.py +++ b/src/merge/resources/filestorage/types/individual_common_model_scope_deserializer.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_permission_deserializer import FieldPermissionDeserializer from .model_permission_deserializer import ModelPermissionDeserializer -class IndividualCommonModelScopeDeserializer(pydantic_v1.BaseModel): +class IndividualCommonModelScopeDeserializer(UniversalBaseModel): model_name: str model_permissions: typing.Optional[typing.Dict[str, ModelPermissionDeserializer]] field_permissions: typing.Optional[FieldPermissionDeserializer] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/individual_common_model_scope_deserializer_request.py 
b/src/merge/resources/filestorage/types/individual_common_model_scope_deserializer_request.py index 8f2e7de5..d0e68f6d 100644 --- a/src/merge/resources/filestorage/types/individual_common_model_scope_deserializer_request.py +++ b/src/merge/resources/filestorage/types/individual_common_model_scope_deserializer_request.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_permission_deserializer_request import FieldPermissionDeserializerRequest from .model_permission_deserializer_request import ModelPermissionDeserializerRequest -class IndividualCommonModelScopeDeserializerRequest(pydantic_v1.BaseModel): +class IndividualCommonModelScopeDeserializerRequest(UniversalBaseModel): model_name: str model_permissions: typing.Optional[typing.Dict[str, ModelPermissionDeserializerRequest]] field_permissions: typing.Optional[FieldPermissionDeserializerRequest] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/issue.py b/src/merge/resources/filestorage/types/issue.py index 086a0db2..28366731 100644 --- a/src/merge/resources/filestorage/types/issue.py +++ b/src/merge/resources/filestorage/types/issue.py @@ -3,14 +3,15 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .issue_status import IssueStatus -class Issue(pydantic_v1.BaseModel): +class Issue(UniversalBaseModel): id: typing.Optional[str] - status: typing.Optional[IssueStatus] = pydantic_v1.Field() + status: typing.Optional[IssueStatus] = pydantic.Field() """ Status of the issue. 
Options: ('ONGOING', 'RESOLVED') @@ -25,20 +26,11 @@ class Issue(pydantic_v1.BaseModel): is_muted: typing.Optional[bool] error_details: typing.Optional[typing.List[str]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/link_token.py b/src/merge/resources/filestorage/types/link_token.py index 1c82d1ac..87c88faf 100644 --- a/src/merge/resources/filestorage/types/link_token.py +++ b/src/merge/resources/filestorage/types/link_token.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class LinkToken(pydantic_v1.BaseModel): + +class LinkToken(UniversalBaseModel): link_token: str integration_name: typing.Optional[str] magic_link_url: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/linked_account_status.py b/src/merge/resources/filestorage/types/linked_account_status.py index 60e21a98..34184012 100644 --- a/src/merge/resources/filestorage/types/linked_account_status.py +++ b/src/merge/resources/filestorage/types/linked_account_status.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class LinkedAccountStatus(pydantic_v1.BaseModel): + +class LinkedAccountStatus(UniversalBaseModel): linked_account_status: str can_make_request: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/meta_response.py b/src/merge/resources/filestorage/types/meta_response.py index debaf4ef..27e02126 100644 --- a/src/merge/resources/filestorage/types/meta_response.py +++ b/src/merge/resources/filestorage/types/meta_response.py @@ -1,34 +1,25 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .linked_account_status import LinkedAccountStatus -class MetaResponse(pydantic_v1.BaseModel): +class MetaResponse(UniversalBaseModel): request_schema: typing.Dict[str, typing.Any] remote_field_classes: typing.Optional[typing.Dict[str, typing.Any]] status: typing.Optional[LinkedAccountStatus] has_conditional_params: bool has_required_linked_account_params: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/model_operation.py b/src/merge/resources/filestorage/types/model_operation.py index 0f4429ec..efe8355e 100644 
--- a/src/merge/resources/filestorage/types/model_operation.py +++ b/src/merge/resources/filestorage/types/model_operation.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ModelOperation(pydantic_v1.BaseModel): + +class ModelOperation(UniversalBaseModel): """ # The ModelOperation Object @@ -25,20 +25,11 @@ class ModelOperation(pydantic_v1.BaseModel): required_post_parameters: typing.List[str] supported_fields: typing.List[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/model_permission_deserializer.py b/src/merge/resources/filestorage/types/model_permission_deserializer.py index 5a6adf20..14bc4f99 100644 --- a/src/merge/resources/filestorage/types/model_permission_deserializer.py +++ b/src/merge/resources/filestorage/types/model_permission_deserializer.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ModelPermissionDeserializer(pydantic_v1.BaseModel): - is_enabled: typing.Optional[bool] - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class ModelPermissionDeserializer(UniversalBaseModel): + is_enabled: typing.Optional[bool] - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/model_permission_deserializer_request.py b/src/merge/resources/filestorage/types/model_permission_deserializer_request.py index 3f72b9ac..cc2e7f77 100644 --- a/src/merge/resources/filestorage/types/model_permission_deserializer_request.py +++ b/src/merge/resources/filestorage/types/model_permission_deserializer_request.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ModelPermissionDeserializerRequest(pydantic_v1.BaseModel): - is_enabled: typing.Optional[bool] - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class ModelPermissionDeserializerRequest(UniversalBaseModel): + is_enabled: typing.Optional[bool] - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/multipart_form_field_request.py b/src/merge/resources/filestorage/types/multipart_form_field_request.py index 9c8ffb21..b6a6c708 100644 --- a/src/merge/resources/filestorage/types/multipart_form_field_request.py +++ b/src/merge/resources/filestorage/types/multipart_form_field_request.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .multipart_form_field_request_encoding import MultipartFormFieldRequestEncoding -class MultipartFormFieldRequest(pydantic_v1.BaseModel): +class MultipartFormFieldRequest(UniversalBaseModel): """ # The MultipartFormField Object @@ -21,17 +21,17 @@ class MultipartFormFieldRequest(pydantic_v1.BaseModel): Create a `MultipartFormField` to define a multipart form entry. """ - name: str = pydantic_v1.Field() + name: str = pydantic.Field() """ The name of the form field """ - data: str = pydantic_v1.Field() + data: str = pydantic.Field() """ The data for the form field. """ - encoding: typing.Optional[MultipartFormFieldRequestEncoding] = pydantic_v1.Field() + encoding: typing.Optional[MultipartFormFieldRequestEncoding] = pydantic.Field() """ The encoding of the value of `data`. Defaults to `RAW` if not defined. @@ -40,30 +40,21 @@ class MultipartFormFieldRequest(pydantic_v1.BaseModel): - `GZIP_BASE64` - GZIP_BASE64 """ - file_name: typing.Optional[str] = pydantic_v1.Field() + file_name: typing.Optional[str] = pydantic.Field() """ The file name of the form field, if the field is for a file. """ - content_type: typing.Optional[str] = pydantic_v1.Field() + content_type: typing.Optional[str] = pydantic.Field() """ The MIME type of the file, if the field is for a file. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/paginated_account_details_and_actions_list.py b/src/merge/resources/filestorage/types/paginated_account_details_and_actions_list.py index 280100c4..07323330 100644 --- a/src/merge/resources/filestorage/types/paginated_account_details_and_actions_list.py +++ b/src/merge/resources/filestorage/types/paginated_account_details_and_actions_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_details_and_actions import AccountDetailsAndActions -class PaginatedAccountDetailsAndActionsList(pydantic_v1.BaseModel): +class PaginatedAccountDetailsAndActionsList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[AccountDetailsAndActions]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/paginated_audit_log_event_list.py b/src/merge/resources/filestorage/types/paginated_audit_log_event_list.py index 1d4154d2..e5e04fa7 100644 --- a/src/merge/resources/filestorage/types/paginated_audit_log_event_list.py +++ b/src/merge/resources/filestorage/types/paginated_audit_log_event_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .audit_log_event import AuditLogEvent -class PaginatedAuditLogEventList(pydantic_v1.BaseModel): +class PaginatedAuditLogEventList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[AuditLogEvent]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/paginated_drive_list.py b/src/merge/resources/filestorage/types/paginated_drive_list.py index 35f24d3c..c6c3da65 100644 --- a/src/merge/resources/filestorage/types/paginated_drive_list.py +++ b/src/merge/resources/filestorage/types/paginated_drive_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .drive import Drive -class PaginatedDriveList(pydantic_v1.BaseModel): +class PaginatedDriveList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Drive]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/paginated_file_list.py b/src/merge/resources/filestorage/types/paginated_file_list.py index 641dcb53..2d20ec26 100644 --- a/src/merge/resources/filestorage/types/paginated_file_list.py +++ b/src/merge/resources/filestorage/types/paginated_file_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .file import File -class PaginatedFileList(pydantic_v1.BaseModel): +class PaginatedFileList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[File]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/paginated_folder_list.py b/src/merge/resources/filestorage/types/paginated_folder_list.py index d13120a6..17ef4a1a 100644 --- a/src/merge/resources/filestorage/types/paginated_folder_list.py +++ b/src/merge/resources/filestorage/types/paginated_folder_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .folder import Folder -class PaginatedFolderList(pydantic_v1.BaseModel): +class PaginatedFolderList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Folder]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/paginated_group_list.py b/src/merge/resources/filestorage/types/paginated_group_list.py index 723ff3ca..16c73fad 100644 --- a/src/merge/resources/filestorage/types/paginated_group_list.py +++ b/src/merge/resources/filestorage/types/paginated_group_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .group import Group -class PaginatedGroupList(pydantic_v1.BaseModel): +class PaginatedGroupList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Group]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/paginated_issue_list.py b/src/merge/resources/filestorage/types/paginated_issue_list.py index 1016e29a..da8437f1 100644 --- a/src/merge/resources/filestorage/types/paginated_issue_list.py +++ b/src/merge/resources/filestorage/types/paginated_issue_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .issue import Issue -class PaginatedIssueList(pydantic_v1.BaseModel): +class PaginatedIssueList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Issue]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/paginated_sync_status_list.py b/src/merge/resources/filestorage/types/paginated_sync_status_list.py index 6c88197e..7faca80c 100644 --- a/src/merge/resources/filestorage/types/paginated_sync_status_list.py +++ b/src/merge/resources/filestorage/types/paginated_sync_status_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .sync_status import SyncStatus -class PaginatedSyncStatusList(pydantic_v1.BaseModel): +class PaginatedSyncStatusList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[SyncStatus]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/paginated_user_list.py b/src/merge/resources/filestorage/types/paginated_user_list.py index 6faaf352..d3e02f65 100644 --- a/src/merge/resources/filestorage/types/paginated_user_list.py +++ b/src/merge/resources/filestorage/types/paginated_user_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .user import User -class PaginatedUserList(pydantic_v1.BaseModel): +class PaginatedUserList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[User]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/permission.py b/src/merge/resources/filestorage/types/permission.py index c300cee9..2f6b5dbc 100644 --- a/src/merge/resources/filestorage/types/permission.py +++ b/src/merge/resources/filestorage/types/permission.py @@ -3,15 +3,16 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .permission_group import PermissionGroup from .permission_roles_item import PermissionRolesItem from .permission_type import PermissionType from .permission_user import PermissionUser -class Permission(pydantic_v1.BaseModel): +class Permission(UniversalBaseModel): """ # The Permission Object @@ -25,32 +26,32 @@ class Permission(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - user: typing.Optional[PermissionUser] = pydantic_v1.Field() + user: typing.Optional[PermissionUser] = pydantic.Field() """ The user that is granted this permission. """ - group: typing.Optional[PermissionGroup] = pydantic_v1.Field() + group: typing.Optional[PermissionGroup] = pydantic.Field() """ The group that is granted this permission. """ - type: typing.Optional[PermissionType] = pydantic_v1.Field() + type: typing.Optional[PermissionType] = pydantic.Field() """ Denotes what type of people have access to the file. 
@@ -60,25 +61,16 @@ class Permission(pydantic_v1.BaseModel): - `ANYONE` - ANYONE """ - roles: typing.Optional[typing.List[typing.Optional[PermissionRolesItem]]] = pydantic_v1.Field() + roles: typing.Optional[typing.List[typing.Optional[PermissionRolesItem]]] = pydantic.Field() """ The permissions that the user or group has for the File or Folder. It is possible for a user or group to have multiple roles, such as viewing & uploading. Possible values include: `READ`, `WRITE`, `OWNER`. In cases where there is no clear mapping, the original value passed through will be returned. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/permission_request.py b/src/merge/resources/filestorage/types/permission_request.py index 1ecc55d7..a0ed836d 100644 --- a/src/merge/resources/filestorage/types/permission_request.py +++ b/src/merge/resources/filestorage/types/permission_request.py @@ -1,17 +1,17 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .permission_request_group import PermissionRequestGroup from .permission_request_roles_item import PermissionRequestRolesItem from .permission_request_type import PermissionRequestType from .permission_request_user import PermissionRequestUser -class PermissionRequest(pydantic_v1.BaseModel): +class PermissionRequest(UniversalBaseModel): """ # The Permission Object @@ -24,22 +24,22 @@ class PermissionRequest(pydantic_v1.BaseModel): Fetch from the `GET Files` or `GET Folders` endpoint. Permissions are unexpanded by default. Use the query param `expand=permissions` to see more details. """ - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - user: typing.Optional[PermissionRequestUser] = pydantic_v1.Field() + user: typing.Optional[PermissionRequestUser] = pydantic.Field() """ The user that is granted this permission. """ - group: typing.Optional[PermissionRequestGroup] = pydantic_v1.Field() + group: typing.Optional[PermissionRequestGroup] = pydantic.Field() """ The group that is granted this permission. """ - type: typing.Optional[PermissionRequestType] = pydantic_v1.Field() + type: typing.Optional[PermissionRequestType] = pydantic.Field() """ Denotes what type of people have access to the file. 
@@ -49,7 +49,7 @@ class PermissionRequest(pydantic_v1.BaseModel): - `ANYONE` - ANYONE """ - roles: typing.Optional[typing.List[typing.Optional[PermissionRequestRolesItem]]] = pydantic_v1.Field() + roles: typing.Optional[typing.List[typing.Optional[PermissionRequestRolesItem]]] = pydantic.Field() """ The permissions that the user or group has for the File or Folder. It is possible for a user or group to have multiple roles, such as viewing & uploading. Possible values include: `READ`, `WRITE`, `OWNER`. In cases where there is no clear mapping, the original value passed through will be returned. """ @@ -57,20 +57,11 @@ class PermissionRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/remote_endpoint_info.py b/src/merge/resources/filestorage/types/remote_endpoint_info.py index da6037bc..9f627cae 100644 --- a/src/merge/resources/filestorage/types/remote_endpoint_info.py +++ b/src/merge/resources/filestorage/types/remote_endpoint_info.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt
 import typing

-from ....core.datetime_utils import serialize_datetime
-from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
+import pydantic

+from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel

-class RemoteEndpointInfo(pydantic_v1.BaseModel):
+
+class RemoteEndpointInfo(UniversalBaseModel):
     method: str
     url_path: str
     field_traversal_path: typing.List[typing.Any]

-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
-
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:

-    class Config:
-        frozen = True
-        smart_union = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/merge/resources/filestorage/types/remote_field_api.py b/src/merge/resources/filestorage/types/remote_field_api.py
index c2a16698..1d1efb6a 100644
--- a/src/merge/resources/filestorage/types/remote_field_api.py
+++ b/src/merge/resources/filestorage/types/remote_field_api.py
@@ -1,39 +1,28 @@
 # This file was auto-generated by Fern from our API Definition.
-import datetime as dt
 import typing

-from ....core.datetime_utils import serialize_datetime
-from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
+import pydantic
+
+from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
 from .advanced_metadata import AdvancedMetadata
 from .remote_endpoint_info import RemoteEndpointInfo
 from .remote_field_api_coverage import RemoteFieldApiCoverage


-class RemoteFieldApi(pydantic_v1.BaseModel):
-    schema_: typing.Dict[str, typing.Any] = pydantic_v1.Field(alias="schema")
+class RemoteFieldApi(UniversalBaseModel):
+    schema_: typing.Dict[str, typing.Any] = pydantic.Field(alias="schema")
     remote_key_name: str
     remote_endpoint_info: RemoteEndpointInfo
     example_values: typing.List[typing.Any]
     advanced_metadata: typing.Optional[AdvancedMetadata]
     coverage: typing.Optional[RemoteFieldApiCoverage]

-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
-
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:

-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-        populate_by_name = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/merge/resources/filestorage/types/remote_field_api_response.py b/src/merge/resources/filestorage/types/remote_field_api_response.py
index 4dca3199..d39a07a2 100644
--- a/src/merge/resources/filestorage/types/remote_field_api_response.py
+++ b/src/merge/resources/filestorage/types/remote_field_api_response.py
@@ -1,36 +1,25 @@
 # This file was auto-generated by Fern from our API Definition.
-import datetime as dt
 import typing

-from ....core.datetime_utils import serialize_datetime
-from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
-from .remote_field_api import RemoteFieldApi
-
+import pydantic

-class RemoteFieldApiResponse(pydantic_v1.BaseModel):
-    file: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="File")
-    folder: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Folder")
-    drive: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Drive")
-    group: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Group")
-    user: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="User")
+from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .remote_field_api import RemoteFieldApi

-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)

-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+class RemoteFieldApiResponse(UniversalBaseModel):
+    file: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="File")
+    folder: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Folder")
+    drive: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Drive")
+    group: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Group")
+    user: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="User")

-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:

-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-        populate_by_name = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/merge/resources/filestorage/types/remote_key.py b/src/merge/resources/filestorage/types/remote_key.py
index e0bec368..0ce7d620 100644
--- a/src/merge/resources/filestorage/types/remote_key.py
+++ b/src/merge/resources/filestorage/types/remote_key.py
@@ -1,13 +1,13 @@
 # This file was auto-generated by Fern from our API Definition.
-import datetime as dt
 import typing

-from ....core.datetime_utils import serialize_datetime
-from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
+import pydantic

+from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel

-class RemoteKey(pydantic_v1.BaseModel):
+
+class RemoteKey(UniversalBaseModel):
     """
     # The RemoteKey Object

@@ -23,20 +23,11 @@ class RemoteKey(pydantic_v1.BaseModel):
     name: str
     key: str

-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
-
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:

-    class Config:
-        frozen = True
-        smart_union = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/merge/resources/filestorage/types/remote_response.py b/src/merge/resources/filestorage/types/remote_response.py
index f39951ee..5551bafc 100644
--- a/src/merge/resources/filestorage/types/remote_response.py
+++ b/src/merge/resources/filestorage/types/remote_response.py
@@ -1,14 +1,14 @@
 # This file was auto-generated by Fern from our API Definition.
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .response_type_enum import ResponseTypeEnum -class RemoteResponse(pydantic_v1.BaseModel): +class RemoteResponse(UniversalBaseModel): """ # The RemoteResponse Object @@ -29,20 +29,11 @@ class RemoteResponse(pydantic_v1.BaseModel): response_type: typing.Optional[ResponseTypeEnum] headers: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/sync_status.py b/src/merge/resources/filestorage/types/sync_status.py index c6b7cbc4..03668cbf 100644 --- a/src/merge/resources/filestorage/types/sync_status.py +++ b/src/merge/resources/filestorage/types/sync_status.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .selective_sync_configurations_usage_enum import SelectiveSyncConfigurationsUsageEnum from .sync_status_status_enum import SyncStatusStatusEnum -class SyncStatus(pydantic_v1.BaseModel): +class SyncStatus(UniversalBaseModel): """ # The SyncStatus Object @@ -30,20 +31,11 @@ class SyncStatus(pydantic_v1.BaseModel): is_initial_sync: bool selective_sync_configurations_usage: typing.Optional[SelectiveSyncConfigurationsUsageEnum] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = 
pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/user.py b/src/merge/resources/filestorage/types/user.py index 792f60ee..9fee4ebb 100644 --- a/src/merge/resources/filestorage/types/user.py +++ b/src/merge/resources/filestorage/types/user.py @@ -3,11 +3,12 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class User(pydantic_v1.BaseModel): + +class User(UniversalBaseModel): """ # The User Object @@ -21,37 +22,37 @@ class User(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The user's name. """ - email_address: typing.Optional[str] = pydantic_v1.Field() + email_address: typing.Optional[str] = pydantic.Field() """ The user's email address. This is typically used to identify a user across linked accounts. """ - is_me: typing.Optional[bool] = pydantic_v1.Field() + is_me: typing.Optional[bool] = pydantic.Field() """ Whether the user is the one who linked this account. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -59,20 +60,11 @@ class User(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[typing.Optional[typing.Dict[str, typing.Any]]]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/validation_problem_source.py b/src/merge/resources/filestorage/types/validation_problem_source.py index fde15b40..c65d82ef 100644 --- a/src/merge/resources/filestorage/types/validation_problem_source.py +++ b/src/merge/resources/filestorage/types/validation_problem_source.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ValidationProblemSource(pydantic_v1.BaseModel): - pointer: str - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class ValidationProblemSource(UniversalBaseModel): + pointer: str - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/warning_validation_problem.py b/src/merge/resources/filestorage/types/warning_validation_problem.py index 6baf9600..348d668a 100644 --- a/src/merge/resources/filestorage/types/warning_validation_problem.py +++ b/src/merge/resources/filestorage/types/warning_validation_problem.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .validation_problem_source import ValidationProblemSource -class WarningValidationProblem(pydantic_v1.BaseModel): +class WarningValidationProblem(UniversalBaseModel): source: typing.Optional[ValidationProblemSource] title: str detail: str problem_type: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/filestorage/types/webhook_receiver.py b/src/merge/resources/filestorage/types/webhook_receiver.py index 0544f256..bb10af95 100644 --- a/src/merge/resources/filestorage/types/webhook_receiver.py +++ b/src/merge/resources/filestorage/types/webhook_receiver.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class WebhookReceiver(pydantic_v1.BaseModel): + +class WebhookReceiver(UniversalBaseModel): event: str is_active: bool key: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/resources/account_details/client.py b/src/merge/resources/hris/resources/account_details/client.py index 8e78ada2..04c084d5 100644 --- a/src/merge/resources/hris/resources/account_details/client.py +++ b/src/merge/resources/hris/resources/account_details/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.account_details import AccountDetails @@ -41,9 +41,9 @@ def retrieve(self, *, request_options: typing.Optional[RequestOptions] = None) - _response = self._client_wrapper.httpx_client.request( "hris/v1/account-details", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountDetails, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountDetails, parse_obj_as(type_=AccountDetails, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -70,20 +70,28 @@ async def retrieve(self, *, request_options: typing.Optional[RequestOptions] = N Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.account_details.retrieve() + + + async def main() -> None: + await client.hris.account_details.retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/account-details", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountDetails, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountDetails, parse_obj_as(type_=AccountDetails, object_=_response.json())) # type: ignore _response_json = 
_response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/account_token/client.py b/src/merge/resources/hris/resources/account_token/client.py index f7f56a5c..7e0ae1ac 100644 --- a/src/merge/resources/hris/resources/account_token/client.py +++ b/src/merge/resources/hris/resources/account_token/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.account_token import AccountToken @@ -46,9 +46,9 @@ def retrieve(self, public_token: str, *, request_options: typing.Optional[Reques _response = self._client_wrapper.httpx_client.request( f"hris/v1/account-token/{jsonable_encoder(public_token)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountToken, parse_obj_as(type_=AccountToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -79,22 +79,30 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.account_token.retrieve( - public_token="public_token", - ) + + + async def main() -> None: + await client.hris.account_token.retrieve( + public_token="public_token", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/account-token/{jsonable_encoder(public_token)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountToken, parse_obj_as(type_=AccountToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/async_passthrough/client.py b/src/merge/resources/hris/resources/async_passthrough/client.py index 98a3f0d5..f41986c7 100644 --- a/src/merge/resources/hris/resources/async_passthrough/client.py +++ b/src/merge/resources/hris/resources/async_passthrough/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.async_passthrough_reciept import AsyncPassthroughReciept from ...types.data_passthrough_request import DataPassthroughRequest @@ -57,9 +57,9 @@ def create( _response = self._client_wrapper.httpx_client.request( "hris/v1/async-passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AsyncPassthroughReciept, 
_response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AsyncPassthroughReciept, parse_obj_as(type_=AsyncPassthroughReciept, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -100,9 +100,9 @@ def retrieve( method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -133,6 +133,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.hris import DataPassthroughRequest, MethodEnum @@ -140,19 +142,25 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.async_passthrough.create( - request=DataPassthroughRequest( - method=MethodEnum.GET, - path="/scooters", - ), - ) + + + async def main() -> None: + await client.hris.async_passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/async-passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AsyncPassthroughReciept, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AsyncPassthroughReciept, parse_obj_as(type_=AsyncPassthroughReciept, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -178,24 +186,32 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.async_passthrough.retrieve( - async_passthrough_receipt_id="async_passthrough_receipt_id", - ) + + + async def main() -> None: + await client.hris.async_passthrough.retrieve( + async_passthrough_receipt_id="async_passthrough_receipt_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/async-passthrough/{jsonable_encoder(async_passthrough_receipt_id)}", method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/audit_trail/client.py b/src/merge/resources/hris/resources/audit_trail/client.py index b8a4ae57..b6c8112e 100644 --- a/src/merge/resources/hris/resources/audit_trail/client.py +++ b/src/merge/resources/hris/resources/audit_trail/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, 
SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_audit_log_event_list import PaginatedAuditLogEventList @@ -79,9 +79,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAuditLogEventList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAuditLogEventList, parse_obj_as(type_=PaginatedAuditLogEventList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -136,13 +136,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.audit_trail.list() + + + async def main() -> None: + await client.hris.audit_trail.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/audit-trail", @@ -157,9 +165,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAuditLogEventList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAuditLogEventList, parse_obj_as(type_=PaginatedAuditLogEventList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/available_actions/client.py b/src/merge/resources/hris/resources/available_actions/client.py index 63ebbc7c..6d4c5a6f 100644 --- a/src/merge/resources/hris/resources/available_actions/client.py +++ b/src/merge/resources/hris/resources/available_actions/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.available_actions import AvailableActions @@ -41,9 +41,9 @@ def retrieve(self, *, request_options: typing.Optional[RequestOptions] = None) - _response = self._client_wrapper.httpx_client.request( "hris/v1/available-actions", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AvailableActions, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AvailableActions, parse_obj_as(type_=AvailableActions, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -70,20 +70,28 @@ async def retrieve(self, *, request_options: typing.Optional[RequestOptions] = N Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.available_actions.retrieve() + + + async def main() -> None: + await client.hris.available_actions.retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/available-actions", method="GET", 
request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AvailableActions, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AvailableActions, parse_obj_as(type_=AvailableActions, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/bank_info/client.py b/src/merge/resources/hris/resources/bank_info/client.py index c56f2ccc..e4678f0a 100644 --- a/src/merge/resources/hris/resources/bank_info/client.py +++ b/src/merge/resources/hris/resources/bank_info/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.bank_info import BankInfo from ...types.paginated_bank_info_list import PaginatedBankInfoList @@ -138,9 +138,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedBankInfoList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedBankInfoList, parse_obj_as(type_=PaginatedBankInfoList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -206,9 +206,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BankInfo, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(BankInfo, parse_obj_as(type_=BankInfo, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -306,13 +306,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.bank_info.list() + + + async def main() -> None: + await client.hris.bank_info.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/bank-info", @@ -337,9 +345,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedBankInfoList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedBankInfoList, parse_obj_as(type_=PaginatedBankInfoList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -384,15 +392,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.bank_info.retrieve( - id="id", - ) + + + async def main() -> None: + await client.hris.bank_info.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( 
f"hris/v1/bank-info/{jsonable_encoder(id)}", @@ -405,9 +421,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(BankInfo, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(BankInfo, parse_obj_as(type_=BankInfo, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/benefits/client.py b/src/merge/resources/hris/resources/benefits/client.py index 64a9292a..c2433efe 100644 --- a/src/merge/resources/hris/resources/benefits/client.py +++ b/src/merge/resources/hris/resources/benefits/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.benefit import Benefit from ...types.paginated_benefit_list import PaginatedBenefitList @@ -108,9 +108,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedBenefitList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedBenefitList, parse_obj_as(type_=PaginatedBenefitList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -163,9 +163,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Benefit, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Benefit, parse_obj_as(type_=Benefit, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -240,13 +240,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.benefits.list() + + + async def main() -> None: + await client.hris.benefits.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/benefits", @@ -266,9 +274,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedBenefitList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedBenefitList, parse_obj_as(type_=PaginatedBenefitList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -305,15 +313,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.benefits.retrieve( - id="id", - ) + + + async def main() -> None: + await client.hris.benefits.retrieve( + id="id", + ) + + + 
asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/benefits/{jsonable_encoder(id)}", @@ -321,9 +337,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Benefit, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Benefit, parse_obj_as(type_=Benefit, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/companies/client.py b/src/merge/resources/hris/resources/companies/client.py index 60d6334d..3260a79f 100644 --- a/src/merge/resources/hris/resources/companies/client.py +++ b/src/merge/resources/hris/resources/companies/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.company import Company from ...types.paginated_company_list import PaginatedCompanyList @@ -98,9 +98,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedCompanyList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedCompanyList, parse_obj_as(type_=PaginatedCompanyList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -149,9 +149,9 @@ def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Company, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Company, parse_obj_as(type_=Company, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -218,13 +218,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.companies.list() + + + async def main() -> None: + await client.hris.companies.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/companies", @@ -242,9 +250,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedCompanyList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedCompanyList, parse_obj_as(type_=PaginatedCompanyList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -277,15 +285,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await 
client.hris.companies.retrieve( - id="id", - ) + + + async def main() -> None: + await client.hris.companies.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/companies/{jsonable_encoder(id)}", @@ -293,9 +309,9 @@ async def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Company, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Company, parse_obj_as(type_=Company, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/delete_account/client.py b/src/merge/resources/hris/resources/delete_account/client.py index 7271b543..7df665ea 100644 --- a/src/merge/resources/hris/resources/delete_account/client.py +++ b/src/merge/resources/hris/resources/delete_account/client.py @@ -38,9 +38,9 @@ def delete(self, *, request_options: typing.Optional[RequestOptions] = None) -> _response = self._client_wrapper.httpx_client.request( "hris/v1/delete-account", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -66,20 +66,28 @@ async def delete(self, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.delete_account.delete() + + + async def main() -> None: + await client.hris.delete_account.delete() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/delete-account", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/dependents/client.py b/src/merge/resources/hris/resources/dependents/client.py index 9ef72e61..844fdbce 100644 --- a/src/merge/resources/hris/resources/dependents/client.py +++ b/src/merge/resources/hris/resources/dependents/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.dependent import Dependent from ...types.paginated_dependent_list import PaginatedDependentList @@ -103,9 +103,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedDependentList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedDependentList, parse_obj_as(type_=PaginatedDependentList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise 
ApiError(status_code=_response.status_code, body=_response.text) @@ -158,9 +158,9 @@ def retrieve( params={"include_remote_data": include_remote_data, "include_sensitive_fields": include_sensitive_fields}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Dependent, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Dependent, parse_obj_as(type_=Dependent, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -231,13 +231,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.dependents.list() + + + async def main() -> None: + await client.hris.dependents.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/dependents", @@ -256,9 +264,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedDependentList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedDependentList, parse_obj_as(type_=PaginatedDependentList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -295,15 +303,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.dependents.retrieve( - id="id", - ) + + + async def main() -> None: + await client.hris.dependents.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/dependents/{jsonable_encoder(id)}", @@ -311,9 +327,9 @@ async def retrieve( params={"include_remote_data": include_remote_data, "include_sensitive_fields": include_sensitive_fields}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Dependent, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Dependent, parse_obj_as(type_=Dependent, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/employee_payroll_runs/client.py b/src/merge/resources/hris/resources/employee_payroll_runs/client.py index 5d06d544..aa57b636 100644 --- a/src/merge/resources/hris/resources/employee_payroll_runs/client.py +++ b/src/merge/resources/hris/resources/employee_payroll_runs/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.employee_payroll_run import EmployeePayrollRun from ...types.paginated_employee_payroll_run_list import PaginatedEmployeePayrollRunList @@ -135,9 +135,9 @@ def list( }, request_options=request_options, ) - if 200 <= 
_response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedEmployeePayrollRunList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedEmployeePayrollRunList, parse_obj_as(type_=PaginatedEmployeePayrollRunList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -190,9 +190,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(EmployeePayrollRun, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(EmployeePayrollRun, parse_obj_as(type_=EmployeePayrollRun, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -287,13 +287,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.employee_payroll_runs.list() + + + async def main() -> None: + await client.hris.employee_payroll_runs.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/employee-payroll-runs", @@ -318,9 +326,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedEmployeePayrollRunList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedEmployeePayrollRunList, parse_obj_as(type_=PaginatedEmployeePayrollRunList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -357,15 +365,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.employee_payroll_runs.retrieve( - id="id", - ) + + + async def main() -> None: + await client.hris.employee_payroll_runs.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/employee-payroll-runs/{jsonable_encoder(id)}", @@ -373,9 +389,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(EmployeePayrollRun, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(EmployeePayrollRun, parse_obj_as(type_=EmployeePayrollRun, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/employees/client.py b/src/merge/resources/hris/resources/employees/client.py index 728d2718..69ea4813 100644 --- a/src/merge/resources/hris/resources/employees/client.py +++ b/src/merge/resources/hris/resources/employees/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import 
jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.employee import Employee from ...types.employee_request import EmployeeRequest @@ -231,9 +231,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedEmployeeList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedEmployeeList, parse_obj_as(type_=PaginatedEmployeeList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -289,9 +289,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(EmployeeResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(EmployeeResponse, parse_obj_as(type_=EmployeeResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -362,9 +362,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Employee, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Employee, parse_obj_as(type_=Employee, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -417,9 +417,9 @@ def ignore_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -452,9 +452,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "hris/v1/employees/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -617,13 +617,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.employees.list() + + + async def main() -> None: + await client.hris.employees.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/employees", @@ -664,9 +672,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedEmployeeList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedEmployeeList, parse_obj_as(type_=PaginatedEmployeeList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise 
ApiError(status_code=_response.status_code, body=_response.text) @@ -703,6 +711,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.hris import EmployeeRequest @@ -710,9 +720,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.employees.create( - model=EmployeeRequest(), - ) + + + async def main() -> None: + await client.hris.employees.create( + model=EmployeeRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/employees", @@ -722,9 +738,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(EmployeeResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(EmployeeResponse, parse_obj_as(type_=EmployeeResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -773,15 +789,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.employees.retrieve( - id="id", - ) + + + async def main() -> None: + await client.hris.employees.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/employees/{jsonable_encoder(id)}", @@ -795,9 +819,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Employee, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Employee, parse_obj_as(type_=Employee, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -831,6 +855,8 @@ async def ignore_create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.hris import ReasonEnum @@ -838,10 +864,16 @@ async def ignore_create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.employees.ignore_create( - model_id="model_id", - reason=ReasonEnum.GENERAL_CUSTOMER_REQUEST, - ) + + + async def main() -> None: + await client.hris.employees.ignore_create( + model_id="model_id", + reason=ReasonEnum.GENERAL_CUSTOMER_REQUEST, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/employees/ignore/{jsonable_encoder(model_id)}", @@ -850,9 +882,9 @@ async def ignore_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -874,20 +906,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.employees.meta_post_retrieve() + + + async def main() -> None: + await client.hris.employees.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await 
self._client_wrapper.httpx_client.request( "hris/v1/employees/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/employer_benefits/client.py b/src/merge/resources/hris/resources/employer_benefits/client.py index a9815b0e..087b4793 100644 --- a/src/merge/resources/hris/resources/employer_benefits/client.py +++ b/src/merge/resources/hris/resources/employer_benefits/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.employer_benefit import EmployerBenefit from ...types.paginated_employer_benefit_list import PaginatedEmployerBenefitList @@ -98,9 +98,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedEmployerBenefitList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedEmployerBenefitList, parse_obj_as(type_=PaginatedEmployerBenefitList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -149,9 +149,9 @@ def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(EmployerBenefit, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(EmployerBenefit, parse_obj_as(type_=EmployerBenefit, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -218,13 +218,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.employer_benefits.list() + + + async def main() -> None: + await client.hris.employer_benefits.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/employer-benefits", @@ -242,9 +250,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedEmployerBenefitList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedEmployerBenefitList, parse_obj_as(type_=PaginatedEmployerBenefitList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -277,15 +285,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", 
api_key="YOUR_API_KEY", ) - await client.hris.employer_benefits.retrieve( - id="id", - ) + + + async def main() -> None: + await client.hris.employer_benefits.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/employer-benefits/{jsonable_encoder(id)}", @@ -293,9 +309,9 @@ async def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(EmployerBenefit, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(EmployerBenefit, parse_obj_as(type_=EmployerBenefit, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/employments/client.py b/src/merge/resources/hris/resources/employments/client.py index 05878468..0baaa04a 100644 --- a/src/merge/resources/hris/resources/employments/client.py +++ b/src/merge/resources/hris/resources/employments/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.employment import Employment from ...types.paginated_employment_list import PaginatedEmploymentList @@ -130,9 +130,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedEmploymentList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedEmploymentList, parse_obj_as(type_=PaginatedEmploymentList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -198,9 +198,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Employment, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Employment, parse_obj_as(type_=Employment, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -287,13 +287,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.employments.list() + + + async def main() -> None: + await client.hris.employments.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/employments", @@ -316,9 +324,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedEmploymentList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedEmploymentList, parse_obj_as(type_=PaginatedEmploymentList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ 
-363,15 +371,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.employments.retrieve( - id="id", - ) + + + async def main() -> None: + await client.hris.employments.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/employments/{jsonable_encoder(id)}", @@ -384,9 +400,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Employment, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Employment, parse_obj_as(type_=Employment, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/field_mapping/client.py b/src/merge/resources/hris/resources/field_mapping/client.py index 5b6d9dbd..f6a04bbc 100644 --- a/src/merge/resources/hris/resources/field_mapping/client.py +++ b/src/merge/resources/hris/resources/field_mapping/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.external_target_field_api_response import ExternalTargetFieldApiResponse from ...types.field_mapping_api_instance_response import FieldMappingApiInstanceResponse @@ -50,9 +50,9 @@ def field_mappings_retrieve( _response = self._client_wrapper.httpx_client.request( "hris/v1/field-mappings", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingApiInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingApiInstanceResponse, parse_obj_as(type_=FieldMappingApiInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -131,9 +131,9 @@ def field_mappings_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -174,9 +174,9 @@ def field_mappings_destroy( method="DELETE", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -238,9 +238,9 @@ def 
field_mappings_partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -288,9 +288,9 @@ def remote_fields_retrieve( params={"common_models": common_models, "include_example_values": include_example_values}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteFieldApiResponse, parse_obj_as(type_=RemoteFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -325,9 +325,9 @@ def target_fields_retrieve( _response = self._client_wrapper.httpx_client.request( "hris/v1/target-fields", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExternalTargetFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ExternalTargetFieldApiResponse, parse_obj_as(type_=ExternalTargetFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -356,20 +356,28 @@ async def field_mappings_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.field_mapping.field_mappings_retrieve() + + + async def main() -> None: + await client.hris.field_mapping.field_mappings_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/field-mappings", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingApiInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingApiInstanceResponse, parse_obj_as(type_=FieldMappingApiInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -419,20 +427,28 @@ async def field_mappings_create( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.field_mapping.field_mappings_create( - target_field_name="example_target_field_name", - target_field_description="this is a example description of the target field", - remote_field_traversal_path=["example_remote_field"], - remote_method="GET", - remote_url_path="/example-url-path", - common_model_name="ExampleCommonModel", - ) + + + async def main() -> None: + await client.hris.field_mapping.field_mappings_create( + target_field_name="example_target_field_name", + target_field_description="this is an example description of the target field", +
remote_field_traversal_path=["example_remote_field"], + remote_method="GET", + remote_url_path="/example-url-path", + common_model_name="ExampleCommonModel", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/field-mappings", @@ -448,9 +464,9 @@ async def field_mappings_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -476,24 +492,32 @@ async def field_mappings_destroy( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.field_mapping.field_mappings_destroy( - field_mapping_id="field_mapping_id", - ) + + + async def main() -> None: + await client.hris.field_mapping.field_mappings_destroy( + field_mapping_id="field_mapping_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/field-mappings/{jsonable_encoder(field_mapping_id)}", method="DELETE", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -534,15 +558,23 @@ async def field_mappings_partial_update( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.field_mapping.field_mappings_partial_update( - field_mapping_id="field_mapping_id", - ) + + + async def main() -> None: + await client.hris.field_mapping.field_mappings_partial_update( + field_mapping_id="field_mapping_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/field-mappings/{jsonable_encoder(field_mapping_id)}", @@ -555,9 +587,9 @@ async def field_mappings_partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -591,13 +623,21 @@ async def remote_fields_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.field_mapping.remote_fields_retrieve() + + + async def main() -> None: + await client.hris.field_mapping.remote_fields_retrieve() + + + asyncio.run(main()) """ _response = await 
self._client_wrapper.httpx_client.request( "hris/v1/remote-fields", @@ -605,9 +645,9 @@ async def remote_fields_retrieve( params={"common_models": common_models, "include_example_values": include_example_values}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteFieldApiResponse, parse_obj_as(type_=RemoteFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -631,20 +671,28 @@ async def target_fields_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.field_mapping.target_fields_retrieve() + + + async def main() -> None: + await client.hris.field_mapping.target_fields_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/target-fields", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExternalTargetFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ExternalTargetFieldApiResponse, parse_obj_as(type_=ExternalTargetFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/force_resync/client.py b/src/merge/resources/hris/resources/force_resync/client.py index 8d591b64..6abc8d42 100644 --- a/src/merge/resources/hris/resources/force_resync/client.py +++ b/src/merge/resources/hris/resources/force_resync/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.sync_status import SyncStatus @@ -43,9 +43,9 @@ def sync_status_resync_create( _response = self._client_wrapper.httpx_client.request( "hris/v1/sync-status/resync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[SyncStatus], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[SyncStatus], parse_obj_as(type_=typing.List[SyncStatus], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -74,20 +74,28 @@ async def sync_status_resync_create( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.force_resync.sync_status_resync_create() + + + async def main() -> None: + await client.hris.force_resync.sync_status_resync_create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/sync-status/resync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(typing.List[SyncStatus], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[SyncStatus], parse_obj_as(type_=typing.List[SyncStatus], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/generate_key/client.py b/src/merge/resources/hris/resources/generate_key/client.py index 2304c5e0..b6a40411 100644 --- a/src/merge/resources/hris/resources/generate_key/client.py +++ b/src/merge/resources/hris/resources/generate_key/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.remote_key import RemoteKey @@ -49,9 +49,9 @@ def create(self, *, name: str, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "hris/v1/generate-key", method="POST", json={"name": name}, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -81,22 +81,30 @@ async def create(self, *, name: str, request_options: typing.Optional[RequestOpt Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.generate_key.create( - name="Remote Deployment Key 1", - ) + + + async def main() -> None: + await client.hris.generate_key.create( + name="Remote Deployment Key 1", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/generate-key", method="POST", json={"name": name}, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/groups/client.py b/src/merge/resources/hris/resources/groups/client.py index 79f59371..1e88c1c9 100644 --- a/src/merge/resources/hris/resources/groups/client.py +++ b/src/merge/resources/hris/resources/groups/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.group import Group from ...types.paginated_group_list import PaginatedGroupList @@ -123,9 +123,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - 
return pydantic_v1.parse_obj_as(PaginatedGroupList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedGroupList, parse_obj_as(type_=PaginatedGroupList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -186,9 +186,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Group, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Group, parse_obj_as(type_=Group, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -275,13 +275,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.groups.list() + + + async def main() -> None: + await client.hris.groups.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/groups", @@ -304,9 +312,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedGroupList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedGroupList, parse_obj_as(type_=PaginatedGroupList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -347,15 +355,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.groups.retrieve( - id="id", - ) + + + async def main() -> None: + await client.hris.groups.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/groups/{jsonable_encoder(id)}", @@ -367,9 +383,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Group, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Group, parse_obj_as(type_=Group, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/issues/client.py b/src/merge/resources/hris/resources/issues/client.py index d8d06bef..c893640c 100644 --- a/src/merge/resources/hris/resources/issues/client.py +++ b/src/merge/resources/hris/resources/issues/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.issue import Issue from ...types.paginated_issue_list import PaginatedIssueList @@ -127,9 +127,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(PaginatedIssueList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedIssueList, parse_obj_as(type_=PaginatedIssueList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -166,9 +166,9 @@ def retrieve(self, id: str, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( f"hris/v1/issues/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Issue, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Issue, parse_obj_as(type_=Issue, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -251,13 +251,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.issues.list() + + + async def main() -> None: + await client.hris.issues.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/issues", @@ -287,9 +295,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedIssueList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedIssueList, parse_obj_as(type_=PaginatedIssueList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -313,22 +321,30 @@ async def retrieve(self, id: str, *, request_options: typing.Optional[RequestOpt Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.issues.retrieve( - id="id", - ) + + + async def main() -> None: + await client.hris.issues.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/issues/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Issue, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Issue, parse_obj_as(type_=Issue, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/link_token/client.py b/src/merge/resources/hris/resources/link_token/client.py index e1cfc7fe..47643e31 100644 --- a/src/merge/resources/hris/resources/link_token/client.py +++ b/src/merge/resources/hris/resources/link_token/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.categories_enum import CategoriesEnum from ...types.common_model_scopes_body_request import 
CommonModelScopesBodyRequest @@ -119,9 +119,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LinkToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(LinkToken, parse_obj_as(type_=LinkToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -198,6 +198,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.hris import CategoriesEnum @@ -205,12 +207,18 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.link_token.create( - end_user_email_address="example@gmail.com", - end_user_organization_name="Test Organization", - end_user_origin_id="12345", - categories=[CategoriesEnum.HRIS, CategoriesEnum.ATS], - ) + + + async def main() -> None: + await client.hris.link_token.create( + end_user_email_address="example@gmail.com", + end_user_organization_name="Test Organization", + end_user_origin_id="12345", + categories=[CategoriesEnum.HRIS, CategoriesEnum.ATS], + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/link-token", @@ -231,9 +239,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LinkToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(LinkToken, parse_obj_as(type_=LinkToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/linked_accounts/client.py b/src/merge/resources/hris/resources/linked_accounts/client.py index e4b32875..745ced32 100644 --- a/src/merge/resources/hris/resources/linked_accounts/client.py +++ b/src/merge/resources/hris/resources/linked_accounts/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_account_details_and_actions_list import PaginatedAccountDetailsAndActionsList from .types.linked_accounts_list_request_category import LinkedAccountsListRequestCategory @@ -122,9 +122,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountDetailsAndActionsList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountDetailsAndActionsList, parse_obj_as(type_=PaginatedAccountDetailsAndActionsList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -214,13 +214,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.linked_accounts.list() + + + async def main() -> None: + await client.hris.linked_accounts.list() + + + asyncio.run(main()) """ _response = await 
self._client_wrapper.httpx_client.request( "hris/v1/linked-accounts", @@ -242,9 +250,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountDetailsAndActionsList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountDetailsAndActionsList, parse_obj_as(type_=PaginatedAccountDetailsAndActionsList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/locations/client.py b/src/merge/resources/hris/resources/locations/client.py index be88581a..6d559dfc 100644 --- a/src/merge/resources/hris/resources/locations/client.py +++ b/src/merge/resources/hris/resources/locations/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.location import Location from ...types.paginated_location_list import PaginatedLocationList @@ -117,9 +117,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedLocationList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedLocationList, parse_obj_as(type_=PaginatedLocationList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -180,9 +180,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Location, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Location, parse_obj_as(type_=Location, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -264,13 +264,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.locations.list() + + + async def main() -> None: + await client.hris.locations.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/locations", @@ -291,9 +299,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedLocationList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedLocationList, parse_obj_as(type_=PaginatedLocationList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -334,15 +342,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.locations.retrieve( - id="id", - ) + + + async def main() -> None: + 
await client.hris.locations.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/locations/{jsonable_encoder(id)}", @@ -354,9 +370,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Location, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Location, parse_obj_as(type_=Location, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/passthrough/client.py b/src/merge/resources/hris/resources/passthrough/client.py index 063bb68f..b5bd475b 100644 --- a/src/merge/resources/hris/resources/passthrough/client.py +++ b/src/merge/resources/hris/resources/passthrough/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.data_passthrough_request import DataPassthroughRequest from ...types.remote_response import RemoteResponse @@ -55,9 +55,9 @@ def create( _response = self._client_wrapper.httpx_client.request( "hris/v1/passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -88,6 +88,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.hris import DataPassthroughRequest, MethodEnum @@ -95,19 +97,25 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.passthrough.create( - request=DataPassthroughRequest( - method=MethodEnum.GET, - path="/scooters", - ), - ) + + + async def main() -> None: + await client.hris.passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/pay_groups/client.py b/src/merge/resources/hris/resources/pay_groups/client.py index 755204f2..2f35b7c7 100644 --- a/src/merge/resources/hris/resources/pay_groups/client.py +++ b/src/merge/resources/hris/resources/pay_groups/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime 
from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_pay_group_list import PaginatedPayGroupList from ...types.pay_group import PayGroup @@ -98,9 +98,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedPayGroupList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedPayGroupList, parse_obj_as(type_=PaginatedPayGroupList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -149,9 +149,9 @@ def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PayGroup, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PayGroup, parse_obj_as(type_=PayGroup, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -218,13 +218,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.pay_groups.list() + + + async def main() -> None: + await client.hris.pay_groups.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/pay-groups", @@ -242,9 +250,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedPayGroupList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedPayGroupList, parse_obj_as(type_=PaginatedPayGroupList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -277,15 +285,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.pay_groups.retrieve( - id="id", - ) + + + async def main() -> None: + await client.hris.pay_groups.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/pay-groups/{jsonable_encoder(id)}", @@ -293,9 +309,9 @@ async def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PayGroup, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PayGroup, parse_obj_as(type_=PayGroup, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/payroll_runs/client.py b/src/merge/resources/hris/resources/payroll_runs/client.py index 4f731aaa..1d9d342e 100644 --- a/src/merge/resources/hris/resources/payroll_runs/client.py +++ 
b/src/merge/resources/hris/resources/payroll_runs/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_payroll_run_list import PaginatedPayrollRunList from ...types.payroll_run import PayrollRun @@ -144,9 +144,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedPayrollRunList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedPayrollRunList, parse_obj_as(type_=PaginatedPayrollRunList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -207,9 +207,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PayrollRun, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PayrollRun, parse_obj_as(type_=PayrollRun, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -310,13 +310,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.payroll_runs.list() + + + async def main() -> None: + await client.hris.payroll_runs.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/payroll-runs", @@ -341,9 +349,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedPayrollRunList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedPayrollRunList, parse_obj_as(type_=PaginatedPayrollRunList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -384,15 +392,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.payroll_runs.retrieve( - id="id", - ) + + + async def main() -> None: + await client.hris.payroll_runs.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/payroll-runs/{jsonable_encoder(id)}", @@ -404,9 +420,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PayrollRun, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PayrollRun, parse_obj_as(type_=PayrollRun, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/regenerate_key/client.py 
b/src/merge/resources/hris/resources/regenerate_key/client.py index a06628c1..602be3fe 100644 --- a/src/merge/resources/hris/resources/regenerate_key/client.py +++ b/src/merge/resources/hris/resources/regenerate_key/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.remote_key import RemoteKey @@ -49,9 +49,9 @@ def create(self, *, name: str, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "hris/v1/regenerate-key", method="POST", json={"name": name}, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -81,22 +81,30 @@ async def create(self, *, name: str, request_options: typing.Optional[RequestOpt Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.regenerate_key.create( - name="Remote Deployment Key 1", - ) + + + async def main() -> None: + await client.hris.regenerate_key.create( + name="Remote Deployment Key 1", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/regenerate-key", method="POST", json={"name": name}, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/scopes/client.py b/src/merge/resources/hris/resources/scopes/client.py index d5aac689..445a1b06 100644 --- a/src/merge/resources/hris/resources/scopes/client.py +++ b/src/merge/resources/hris/resources/scopes/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.common_model_scope_api import CommonModelScopeApi from ...types.individual_common_model_scope_deserializer_request import IndividualCommonModelScopeDeserializerRequest @@ -47,9 +47,9 @@ def default_scopes_retrieve( _response = self._client_wrapper.httpx_client.request( "hris/v1/default-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise 
ApiError(status_code=_response.status_code, body=_response.text) @@ -84,9 +84,9 @@ def linked_account_scopes_retrieve( _response = self._client_wrapper.httpx_client.request( "hris/v1/linked-account-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -157,9 +157,9 @@ def linked_account_scopes_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -188,20 +188,28 @@ async def default_scopes_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.scopes.default_scopes_retrieve() + + + async def main() -> None: + await client.hris.scopes.default_scopes_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/default-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -225,20 +233,28 @@ async def linked_account_scopes_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.scopes.linked_account_scopes_retrieve() + + + async def main() -> None: + await client.hris.scopes.linked_account_scopes_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/linked-account-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -268,6 +284,8 @@ async def linked_account_scopes_create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.hris import ( IndividualCommonModelScopeDeserializerRequest, @@ -278,29 +296,35 @@ async def linked_account_scopes_create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.scopes.linked_account_scopes_create( - common_models=[ - 
IndividualCommonModelScopeDeserializerRequest( - model_name="Employee", - model_permissions={ - "READ": ModelPermissionDeserializerRequest( - is_enabled=True, - ), - "WRITE": ModelPermissionDeserializerRequest( - is_enabled=False, - ), - }, - ), - IndividualCommonModelScopeDeserializerRequest( - model_name="Benefit", - model_permissions={ - "WRITE": ModelPermissionDeserializerRequest( - is_enabled=False, - ) - }, - ), - ], - ) + + + async def main() -> None: + await client.hris.scopes.linked_account_scopes_create( + common_models=[ + IndividualCommonModelScopeDeserializerRequest( + model_name="Employee", + model_permissions={ + "READ": ModelPermissionDeserializerRequest( + is_enabled=True, + ), + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ), + }, + ), + IndividualCommonModelScopeDeserializerRequest( + model_name="Benefit", + model_permissions={ + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ) + }, + ), + ], + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/linked-account-scopes", @@ -309,9 +333,9 @@ async def linked_account_scopes_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/sync_status/client.py b/src/merge/resources/hris/resources/sync_status/client.py index a1ab5a1b..1cea4f1c 100644 --- a/src/merge/resources/hris/resources/sync_status/client.py +++ b/src/merge/resources/hris/resources/sync_status/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_sync_status_list import PaginatedSyncStatusList @@ -56,9 +56,9 @@ def list( params={"cursor": cursor, "page_size": page_size}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedSyncStatusList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedSyncStatusList, parse_obj_as(type_=PaginatedSyncStatusList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -97,13 +97,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.sync_status.list() + + + async def main() -> None: + await client.hris.sync_status.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/sync-status", @@ -111,9 +119,9 @@ async def list( params={"cursor": cursor, "page_size": page_size}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedSyncStatusList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + 
return typing.cast(PaginatedSyncStatusList, parse_obj_as(type_=PaginatedSyncStatusList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/teams/client.py b/src/merge/resources/hris/resources/teams/client.py index b5fde795..a439f50b 100644 --- a/src/merge/resources/hris/resources/teams/client.py +++ b/src/merge/resources/hris/resources/teams/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_team_list import PaginatedTeamList from ...types.team import Team @@ -108,9 +108,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTeamList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTeamList, parse_obj_as(type_=PaginatedTeamList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -163,9 +163,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Team, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Team, parse_obj_as(type_=Team, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -240,13 +240,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.teams.list() + + + async def main() -> None: + await client.hris.teams.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/teams", @@ -266,9 +274,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTeamList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTeamList, parse_obj_as(type_=PaginatedTeamList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -305,15 +313,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.teams.retrieve( - id="id", - ) + + + async def main() -> None: + await client.hris.teams.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/teams/{jsonable_encoder(id)}", @@ -321,9 +337,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(Team, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Team, parse_obj_as(type_=Team, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/time_off/client.py b/src/merge/resources/hris/resources/time_off/client.py index a729c018..de90d905 100644 --- a/src/merge/resources/hris/resources/time_off/client.py +++ b/src/merge/resources/hris/resources/time_off/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.meta_response import MetaResponse from ...types.paginated_time_off_list import PaginatedTimeOffList @@ -180,9 +180,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTimeOffList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTimeOffList, parse_obj_as(type_=PaginatedTimeOffList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -238,9 +238,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TimeOffResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TimeOffResponse, parse_obj_as(type_=TimeOffResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -306,9 +306,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TimeOff, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TimeOff, parse_obj_as(type_=TimeOff, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -341,9 +341,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "hris/v1/time-off/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -467,13 +467,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.time_off.list() + + + async def main() -> None: + await client.hris.time_off.list() + + + asyncio.run(main()) """ _response = await 
self._client_wrapper.httpx_client.request( "hris/v1/time-off", @@ -502,9 +510,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTimeOffList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTimeOffList, parse_obj_as(type_=PaginatedTimeOffList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -541,6 +549,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.hris import TimeOffRequest @@ -548,9 +558,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.time_off.create( - model=TimeOffRequest(), - ) + + + async def main() -> None: + await client.hris.time_off.create( + model=TimeOffRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/time-off", @@ -560,9 +576,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TimeOffResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TimeOffResponse, parse_obj_as(type_=TimeOffResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -607,15 +623,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.time_off.retrieve( - id="id", - ) + + + async def main() -> None: + await client.hris.time_off.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/time-off/{jsonable_encoder(id)}", @@ -628,9 +652,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TimeOff, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TimeOff, parse_obj_as(type_=TimeOff, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -652,20 +676,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.time_off.meta_post_retrieve() + + + async def main() -> None: + await client.hris.time_off.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/time-off/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git 
a/src/merge/resources/hris/resources/time_off_balances/client.py b/src/merge/resources/hris/resources/time_off_balances/client.py index 4794caff..7c28a2a5 100644 --- a/src/merge/resources/hris/resources/time_off_balances/client.py +++ b/src/merge/resources/hris/resources/time_off_balances/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_time_off_balance_list import PaginatedTimeOffBalanceList from ...types.time_off_balance import TimeOffBalance @@ -131,9 +131,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTimeOffBalanceList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTimeOffBalanceList, parse_obj_as(type_=PaginatedTimeOffBalanceList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -199,9 +199,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TimeOffBalance, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TimeOffBalance, parse_obj_as(type_=TimeOffBalance, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -295,13 +295,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.time_off_balances.list() + + + async def main() -> None: + await client.hris.time_off_balances.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/time-off-balances", @@ -324,9 +332,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTimeOffBalanceList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTimeOffBalanceList, parse_obj_as(type_=PaginatedTimeOffBalanceList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -371,15 +379,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.time_off_balances.retrieve( - id="id", - ) + + + async def main() -> None: + await client.hris.time_off_balances.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/time-off-balances/{jsonable_encoder(id)}", @@ -392,9 +408,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TimeOffBalance, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TimeOffBalance, 
parse_obj_as(type_=TimeOffBalance, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/timesheet_entries/client.py b/src/merge/resources/hris/resources/timesheet_entries/client.py index 8bb063b0..bbb608df 100644 --- a/src/merge/resources/hris/resources/timesheet_entries/client.py +++ b/src/merge/resources/hris/resources/timesheet_entries/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.meta_response import MetaResponse from ...types.paginated_timesheet_entry_list import PaginatedTimesheetEntryList @@ -135,9 +135,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTimesheetEntryList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTimesheetEntryList, parse_obj_as(type_=PaginatedTimesheetEntryList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -193,9 +193,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TimesheetEntryResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TimesheetEntryResponse, parse_obj_as(type_=TimesheetEntryResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -244,9 +244,9 @@ def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TimesheetEntry, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TimesheetEntry, parse_obj_as(type_=TimesheetEntry, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -279,9 +279,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "hris/v1/timesheet-entries/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -372,13 +372,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.timesheet_entries.list() + + + async def main() -> None: + await client.hris.timesheet_entries.list() + + + 
asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/timesheet-entries", @@ -402,9 +410,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTimesheetEntryList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTimesheetEntryList, parse_obj_as(type_=PaginatedTimesheetEntryList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -441,6 +449,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.hris import TimesheetEntryRequest @@ -448,9 +458,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.timesheet_entries.create( - model=TimesheetEntryRequest(), - ) + + + async def main() -> None: + await client.hris.timesheet_entries.create( + model=TimesheetEntryRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/timesheet-entries", @@ -460,9 +476,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TimesheetEntryResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TimesheetEntryResponse, parse_obj_as(type_=TimesheetEntryResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -495,15 +511,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.timesheet_entries.retrieve( - id="id", - ) + + + async def main() -> None: + await client.hris.timesheet_entries.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"hris/v1/timesheet-entries/{jsonable_encoder(id)}", @@ -511,9 +535,9 @@ async def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TimesheetEntry, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TimesheetEntry, parse_obj_as(type_=TimesheetEntry, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -535,20 +559,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.timesheet_entries.meta_post_retrieve() + + + async def main() -> None: + await client.hris.timesheet_entries.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/timesheet-entries/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return 
typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/resources/webhook_receivers/client.py b/src/merge/resources/hris/resources/webhook_receivers/client.py index 76ef467d..485678c2 100644 --- a/src/merge/resources/hris/resources/webhook_receivers/client.py +++ b/src/merge/resources/hris/resources/webhook_receivers/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.webhook_receiver import WebhookReceiver @@ -44,9 +44,9 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty _response = self._client_wrapper.httpx_client.request( "hris/v1/webhook-receivers", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[WebhookReceiver], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[WebhookReceiver], parse_obj_as(type_=typing.List[WebhookReceiver], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -99,9 +99,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(WebhookReceiver, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(WebhookReceiver, parse_obj_as(type_=WebhookReceiver, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -128,20 +128,28 @@ async def list(self, *, request_options: typing.Optional[RequestOptions] = None) Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.webhook_receivers.list() + + + async def main() -> None: + await client.hris.webhook_receivers.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "hris/v1/webhook-receivers", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[WebhookReceiver], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[WebhookReceiver], parse_obj_as(type_=typing.List[WebhookReceiver], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -176,16 +184,24 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.hris.webhook_receivers.create( - event="event", - is_active=True, - ) + + + async def main() -> None: + await client.hris.webhook_receivers.create( + event="event", + is_active=True, + ) + + + asyncio.run(main()) """ _response = await 
self._client_wrapper.httpx_client.request( "hris/v1/webhook-receivers", @@ -194,9 +210,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(WebhookReceiver, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(WebhookReceiver, parse_obj_as(type_=WebhookReceiver, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/hris/types/account_details.py b/src/merge/resources/hris/types/account_details.py index 81f9587b..58cd348a 100644 --- a/src/merge/resources/hris/types/account_details.py +++ b/src/merge/resources/hris/types/account_details.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .category_enum import CategoryEnum -class AccountDetails(pydantic_v1.BaseModel): +class AccountDetails(UniversalBaseModel): id: typing.Optional[str] integration: typing.Optional[str] integration_slug: typing.Optional[str] @@ -18,27 +18,18 @@ class AccountDetails(pydantic_v1.BaseModel): end_user_email_address: typing.Optional[str] status: typing.Optional[str] webhook_listener_url: typing.Optional[str] - is_duplicate: typing.Optional[bool] = pydantic_v1.Field() + is_duplicate: typing.Optional[bool] = pydantic.Field() """ Whether a Production Linked Account's credentials match another existing Production Linked Account. This field is `null` for Test Linked Accounts, incomplete Production Linked Accounts, and ignored duplicate Production Linked Account sets. """ account_type: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/account_details_and_actions.py b/src/merge/resources/hris/types/account_details_and_actions.py index 6a1b2874..ffc38f58 100644 --- a/src/merge/resources/hris/types/account_details_and_actions.py +++ b/src/merge/resources/hris/types/account_details_and_actions.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_details_and_actions_integration import AccountDetailsAndActionsIntegration from .account_details_and_actions_status_enum import AccountDetailsAndActionsStatusEnum from .category_enum import CategoryEnum -class AccountDetailsAndActions(pydantic_v1.BaseModel): +class AccountDetailsAndActions(UniversalBaseModel): """ # The LinkedAccount Object @@ -30,13 +30,13 @@ class AccountDetailsAndActions(pydantic_v1.BaseModel): end_user_origin_id: typing.Optional[str] end_user_organization_name: str end_user_email_address: str - subdomain: typing.Optional[str] = pydantic_v1.Field() + subdomain: typing.Optional[str] = pydantic.Field() """ The tenant or domain the customer has provided access to. """ webhook_listener_url: str - is_duplicate: typing.Optional[bool] = pydantic_v1.Field() + is_duplicate: typing.Optional[bool] = pydantic.Field() """ Whether a Production Linked Account's credentials match another existing Production Linked Account. This field is `null` for Test Linked Accounts, incomplete Production Linked Accounts, and ignored duplicate Production Linked Account sets. """ @@ -44,20 +44,11 @@ class AccountDetailsAndActions(pydantic_v1.BaseModel): integration: typing.Optional[AccountDetailsAndActionsIntegration] account_type: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/account_details_and_actions_integration.py b/src/merge/resources/hris/types/account_details_and_actions_integration.py index 7c300a2b..8aa63726 100644 --- a/src/merge/resources/hris/types/account_details_and_actions_integration.py +++ b/src/merge/resources/hris/types/account_details_and_actions_integration.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .categories_enum import CategoriesEnum from .model_operation import ModelOperation -class AccountDetailsAndActionsIntegration(pydantic_v1.BaseModel): +class AccountDetailsAndActionsIntegration(UniversalBaseModel): name: str categories: typing.List[CategoriesEnum] image: typing.Optional[str] @@ -19,20 +19,11 @@ class AccountDetailsAndActionsIntegration(pydantic_v1.BaseModel): passthrough_available: bool available_model_operations: typing.Optional[typing.List[ModelOperation]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/account_integration.py b/src/merge/resources/hris/types/account_integration.py index 57006e35..7a8a27ab 100644 --- a/src/merge/resources/hris/types/account_integration.py +++ b/src/merge/resources/hris/types/account_integration.py @@ -1,69 +1,60 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .categories_enum import CategoriesEnum -class AccountIntegration(pydantic_v1.BaseModel): - name: str = pydantic_v1.Field() +class AccountIntegration(UniversalBaseModel): + name: str = pydantic.Field() """ Company name. """ - categories: typing.Optional[typing.List[CategoriesEnum]] = pydantic_v1.Field() + categories: typing.Optional[typing.List[CategoriesEnum]] = pydantic.Field() """ Category or categories this integration belongs to. Multiple categories should be comma separated, i.e. [ats, hris]. """ - image: typing.Optional[str] = pydantic_v1.Field() + image: typing.Optional[str] = pydantic.Field() """ Company logo in rectangular shape. Upload an image with a clear background. """ - square_image: typing.Optional[str] = pydantic_v1.Field() + square_image: typing.Optional[str] = pydantic.Field() """ Company logo in square shape. Upload an image with a white background. """ - color: typing.Optional[str] = pydantic_v1.Field() + color: typing.Optional[str] = pydantic.Field() """ The color of this integration used for buttons and text throughout the app and landing pages. Choose a darker, saturated color. 
""" slug: typing.Optional[str] - api_endpoints_to_documentation_urls: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + api_endpoints_to_documentation_urls: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ Mapping of API endpoints to documentation urls for support. Example: {'GET': [['/common-model-scopes', 'https://docs.merge.dev/accounting/common-model-scopes/#common_model_scopes_retrieve'],['/common-model-actions', 'https://docs.merge.dev/accounting/common-model-actions/#common_model_actions_retrieve']], 'POST': []} """ - webhook_setup_guide_url: typing.Optional[str] = pydantic_v1.Field() + webhook_setup_guide_url: typing.Optional[str] = pydantic.Field() """ Setup guide URL for third party webhook creation. Exposed in Merge Docs. """ - category_beta_status: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + category_beta_status: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ Category or categories this integration is in beta status for. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/account_token.py b/src/merge/resources/hris/types/account_token.py index 4794fea3..c280c7cb 100644 --- a/src/merge/resources/hris/types/account_token.py +++ b/src/merge/resources/hris/types/account_token.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_integration import AccountIntegration -class AccountToken(pydantic_v1.BaseModel): +class AccountToken(UniversalBaseModel): account_token: str integration: AccountIntegration - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/advanced_metadata.py b/src/merge/resources/hris/types/advanced_metadata.py index 2aa7d1d6..5b0ec9e2 100644 --- a/src/merge/resources/hris/types/advanced_metadata.py +++ b/src/merge/resources/hris/types/advanced_metadata.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class AdvancedMetadata(pydantic_v1.BaseModel): + +class AdvancedMetadata(UniversalBaseModel): id: str display_name: typing.Optional[str] description: typing.Optional[str] @@ -15,20 +15,11 @@ class AdvancedMetadata(pydantic_v1.BaseModel): is_custom: typing.Optional[bool] field_choices: typing.Optional[typing.List[typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/async_passthrough_reciept.py b/src/merge/resources/hris/types/async_passthrough_reciept.py index 2cc33210..f2144443 100644 --- a/src/merge/resources/hris/types/async_passthrough_reciept.py +++ b/src/merge/resources/hris/types/async_passthrough_reciept.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
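The type modules above follow a second repeated pattern: each model now subclasses UniversalBaseModel, drops the hand-rolled json()/dict() overrides and the datetime json_encoders, and selects its configuration at class-definition time via IS_PYDANTIC_V2. A minimal standalone sketch of that shape, using an invented field name rather than one of the SDK's models:

    import typing

    import pydantic

    from merge.core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel


    class ExampleModel(UniversalBaseModel):
        # Invented field for the sketch; the regenerated models declare their own.
        display_name: typing.Optional[str] = None

        if IS_PYDANTIC_V2:
            # Pydantic v2 path: settings live on model_config.
            model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore
        else:
            # Pydantic v1 path: the same settings on a nested Config class.
            class Config:
                frozen = True
                smart_union = True
                extra = pydantic.Extra.allow

Only one branch of the conditional executes when the class body runs, so a single generated module works against either Pydantic major version without the per-model serialization helpers that the patch removes.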
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class AsyncPassthroughReciept(pydantic_v1.BaseModel): - async_passthrough_receipt_id: str - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class AsyncPassthroughReciept(UniversalBaseModel): + async_passthrough_receipt_id: str - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/audit_log_event.py b/src/merge/resources/hris/types/audit_log_event.py index 43be24a5..c0b4e43a 100644 --- a/src/merge/resources/hris/types/audit_log_event.py +++ b/src/merge/resources/hris/types/audit_log_event.py @@ -3,25 +3,26 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .audit_log_event_event_type import AuditLogEventEventType from .audit_log_event_role import AuditLogEventRole -class AuditLogEvent(pydantic_v1.BaseModel): +class AuditLogEvent(UniversalBaseModel): id: typing.Optional[str] - user_name: typing.Optional[str] = pydantic_v1.Field() + user_name: typing.Optional[str] = pydantic.Field() """ The User's full name at the time of this Event occurring. """ - user_email: typing.Optional[str] = pydantic_v1.Field() + user_email: typing.Optional[str] = pydantic.Field() """ The User's email at the time of this Event occurring. """ - role: AuditLogEventRole = pydantic_v1.Field() + role: AuditLogEventRole = pydantic.Field() """ Designates the role of the user (or SYSTEM/API if action not taken by a user) at the time of this Event occurring. @@ -34,7 +35,7 @@ class AuditLogEvent(pydantic_v1.BaseModel): """ ip_address: str - event_type: AuditLogEventEventType = pydantic_v1.Field() + event_type: AuditLogEventEventType = pydantic.Field() """ Designates the type of event that occurred. 
@@ -80,20 +81,11 @@ class AuditLogEvent(pydantic_v1.BaseModel): event_description: str created_at: typing.Optional[dt.datetime] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/available_actions.py b/src/merge/resources/hris/types/available_actions.py index bbd94581..1f1d424c 100644 --- a/src/merge/resources/hris/types/available_actions.py +++ b/src/merge/resources/hris/types/available_actions.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_integration import AccountIntegration from .model_operation import ModelOperation -class AvailableActions(pydantic_v1.BaseModel): +class AvailableActions(UniversalBaseModel): """ # The AvailableActions Object @@ -26,20 +26,11 @@ class AvailableActions(pydantic_v1.BaseModel): passthrough_available: bool available_model_operations: typing.Optional[typing.List[ModelOperation]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/bank_info.py b/src/merge/resources/hris/types/bank_info.py index 4d4bdf91..c203b924 100644 --- a/src/merge/resources/hris/types/bank_info.py +++ b/src/merge/resources/hris/types/bank_info.py @@ -3,14 +3,15 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import 
pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .bank_info_account_type import BankInfoAccountType from .bank_info_employee import BankInfoEmployee from .remote_data import RemoteData -class BankInfo(pydantic_v1.BaseModel): +class BankInfo(UniversalBaseModel): """ # The BankInfo Object @@ -24,42 +25,42 @@ class BankInfo(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - employee: typing.Optional[BankInfoEmployee] = pydantic_v1.Field() + employee: typing.Optional[BankInfoEmployee] = pydantic.Field() """ The employee with this bank account. """ - account_number: typing.Optional[str] = pydantic_v1.Field() + account_number: typing.Optional[str] = pydantic.Field() """ The account number. """ - routing_number: typing.Optional[str] = pydantic_v1.Field() + routing_number: typing.Optional[str] = pydantic.Field() """ The routing number. """ - bank_name: typing.Optional[str] = pydantic_v1.Field() + bank_name: typing.Optional[str] = pydantic.Field() """ The bank name. """ - account_type: typing.Optional[BankInfoAccountType] = pydantic_v1.Field() + account_type: typing.Optional[BankInfoAccountType] = pydantic.Field() """ The bank account type @@ -67,12 +68,12 @@ class BankInfo(pydantic_v1.BaseModel): - `CHECKING` - CHECKING """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the matching bank object was created in the third party system. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -80,20 +81,11 @@ class BankInfo(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/benefit.py b/src/merge/resources/hris/types/benefit.py index 5d0d2d70..928d49d2 100644 --- a/src/merge/resources/hris/types/benefit.py +++ b/src/merge/resources/hris/types/benefit.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .benefit_employee import BenefitEmployee from .remote_data import RemoteData -class Benefit(pydantic_v1.BaseModel): +class Benefit(UniversalBaseModel): """ # The Benefit Object @@ -23,62 +24,62 @@ class Benefit(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - employee: typing.Optional[BenefitEmployee] = pydantic_v1.Field() + employee: typing.Optional[BenefitEmployee] = pydantic.Field() """ The employee on the plan. """ - provider_name: typing.Optional[str] = pydantic_v1.Field() + provider_name: typing.Optional[str] = pydantic.Field() """ The name of the benefit provider. """ - benefit_plan_type: typing.Optional[str] = pydantic_v1.Field() + benefit_plan_type: typing.Optional[str] = pydantic.Field() """ The type of benefit plan """ - employee_contribution: typing.Optional[float] = pydantic_v1.Field() + employee_contribution: typing.Optional[float] = pydantic.Field() """ The employee's contribution. """ - company_contribution: typing.Optional[float] = pydantic_v1.Field() + company_contribution: typing.Optional[float] = pydantic.Field() """ The company's contribution. """ - start_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + start_date: typing.Optional[dt.datetime] = pydantic.Field() """ The day and time the benefit started. 
""" - end_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + end_date: typing.Optional[dt.datetime] = pydantic.Field() """ The day and time the benefit ended. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ - employer_benefit: typing.Optional[str] = pydantic_v1.Field() + employer_benefit: typing.Optional[str] = pydantic.Field() """ The employer benefit plan the employee is enrolled in. """ @@ -86,20 +87,11 @@ class Benefit(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/common_model_scope_api.py b/src/merge/resources/hris/types/common_model_scope_api.py index d6fdec56..093001ba 100644 --- a/src/merge/resources/hris/types/common_model_scope_api.py +++ b/src/merge/resources/hris/types/common_model_scope_api.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .individual_common_model_scope_deserializer import IndividualCommonModelScopeDeserializer -class CommonModelScopeApi(pydantic_v1.BaseModel): - common_models: typing.List[IndividualCommonModelScopeDeserializer] = pydantic_v1.Field() +class CommonModelScopeApi(UniversalBaseModel): + common_models: typing.List[IndividualCommonModelScopeDeserializer] = pydantic.Field() """ The common models you want to update the scopes for """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/common_model_scopes_body_request.py b/src/merge/resources/hris/types/common_model_scopes_body_request.py index e956cbdc..9098f031 100644 --- a/src/merge/resources/hris/types/common_model_scopes_body_request.py +++ b/src/merge/resources/hris/types/common_model_scopes_body_request.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .enabled_actions_enum import EnabledActionsEnum -class CommonModelScopesBodyRequest(pydantic_v1.BaseModel): +class CommonModelScopesBodyRequest(UniversalBaseModel): model_id: str enabled_actions: typing.List[EnabledActionsEnum] disabled_fields: typing.List[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/company.py b/src/merge/resources/hris/types/company.py index 50e78999..00dbe7be 100644 --- a/src/merge/resources/hris/types/company.py +++ b/src/merge/resources/hris/types/company.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData -class Company(pydantic_v1.BaseModel): +class Company(UniversalBaseModel): """ # The Company Object @@ -22,37 +23,37 @@ class Company(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - legal_name: typing.Optional[str] = pydantic_v1.Field() + legal_name: typing.Optional[str] = pydantic.Field() """ The company's legal name. """ - display_name: typing.Optional[str] = pydantic_v1.Field() + display_name: typing.Optional[str] = pydantic.Field() """ The company's display name. """ - eins: typing.Optional[typing.List[typing.Optional[str]]] = pydantic_v1.Field() + eins: typing.Optional[typing.List[typing.Optional[str]]] = pydantic.Field() """ The company's Employer Identification Numbers. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -60,20 +61,11 @@ class Company(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/data_passthrough_request.py b/src/merge/resources/hris/types/data_passthrough_request.py index fdf9b18c..3f9ee090 100644 --- a/src/merge/resources/hris/types/data_passthrough_request.py +++ b/src/merge/resources/hris/types/data_passthrough_request.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .method_enum import MethodEnum from .multipart_form_field_request import MultipartFormFieldRequest from .request_format_enum import RequestFormatEnum -class DataPassthroughRequest(pydantic_v1.BaseModel): +class DataPassthroughRequest(UniversalBaseModel): """ # The DataPassthrough Object @@ -24,51 +24,42 @@ class DataPassthroughRequest(pydantic_v1.BaseModel): """ method: MethodEnum - path: str = pydantic_v1.Field() + path: str = pydantic.Field() """ The path of the request in the third party's platform. """ - base_url_override: typing.Optional[str] = pydantic_v1.Field() + base_url_override: typing.Optional[str] = pydantic.Field() """ An optional override of the third party's base url for the request. """ - data: typing.Optional[str] = pydantic_v1.Field() + data: typing.Optional[str] = pydantic.Field() """ The data with the request. You must include a `request_format` parameter matching the data's format """ - multipart_form_data: typing.Optional[typing.List[MultipartFormFieldRequest]] = pydantic_v1.Field() + multipart_form_data: typing.Optional[typing.List[MultipartFormFieldRequest]] = pydantic.Field() """ Pass an array of `MultipartFormField` objects in here instead of using the `data` param if `request_format` is set to `MULTIPART`. """ - headers: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + headers: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ The headers to use for the request (Merge will handle the account's authorization headers). `Content-Type` header is required for passthrough. Choose content type corresponding to expected format of receiving server. 
""" request_format: typing.Optional[RequestFormatEnum] - normalize_response: typing.Optional[bool] = pydantic_v1.Field() + normalize_response: typing.Optional[bool] = pydantic.Field() """ Optional. If true, the response will always be an object of the form `{"type": T, "value": ...}` where `T` will be one of `string, boolean, number, null, array, object`. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/debug_mode_log.py b/src/merge/resources/hris/types/debug_mode_log.py index 321c9090..8edea2ae 100644 --- a/src/merge/resources/hris/types/debug_mode_log.py +++ b/src/merge/resources/hris/types/debug_mode_log.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_model_log_summary import DebugModelLogSummary -class DebugModeLog(pydantic_v1.BaseModel): +class DebugModeLog(UniversalBaseModel): log_id: str dashboard_view: str log_summary: DebugModelLogSummary - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/debug_model_log_summary.py b/src/merge/resources/hris/types/debug_model_log_summary.py index 06bb154b..4f5b07db 100644 --- a/src/merge/resources/hris/types/debug_model_log_summary.py +++ b/src/merge/resources/hris/types/debug_model_log_summary.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class DebugModelLogSummary(pydantic_v1.BaseModel): + +class DebugModelLogSummary(UniversalBaseModel): url: str method: str status_code: int - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/deduction.py b/src/merge/resources/hris/types/deduction.py index 35b50490..8be3f389 100644 --- a/src/merge/resources/hris/types/deduction.py +++ b/src/merge/resources/hris/types/deduction.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData -class Deduction(pydantic_v1.BaseModel): +class Deduction(UniversalBaseModel): """ # The Deduction Object @@ -22,38 +23,38 @@ class Deduction(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ employee_payroll_run: typing.Optional[str] - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The deduction's name. """ - employee_deduction: typing.Optional[float] = pydantic_v1.Field() + employee_deduction: typing.Optional[float] = pydantic.Field() """ The amount of money that is withheld from an employee's gross pay by the employee. """ - company_deduction: typing.Optional[float] = pydantic_v1.Field() + company_deduction: typing.Optional[float] = pydantic.Field() """ The amount of money that is withheld on behalf of an employee by the company. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -61,20 +62,11 @@ class Deduction(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/dependent.py b/src/merge/resources/hris/types/dependent.py index 29e5ff2f..91248a68 100644 --- a/src/merge/resources/hris/types/dependent.py +++ b/src/merge/resources/hris/types/dependent.py @@ -3,14 +3,15 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .dependent_gender import DependentGender from .dependent_relationship import DependentRelationship from .remote_data import RemoteData -class Dependent(pydantic_v1.BaseModel): +class Dependent(UniversalBaseModel): """ # The Dependent Object @@ -24,37 +25,37 @@ class Dependent(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - first_name: typing.Optional[str] = pydantic_v1.Field() + first_name: typing.Optional[str] = pydantic.Field() """ The dependents's first name. """ - middle_name: typing.Optional[str] = pydantic_v1.Field() + middle_name: typing.Optional[str] = pydantic.Field() """ The dependents's middle name. """ - last_name: typing.Optional[str] = pydantic_v1.Field() + last_name: typing.Optional[str] = pydantic.Field() """ The dependents's last name. """ - relationship: typing.Optional[DependentRelationship] = pydantic_v1.Field() + relationship: typing.Optional[DependentRelationship] = pydantic.Field() """ The dependent's relationship to the employee. @@ -63,17 +64,17 @@ class Dependent(pydantic_v1.BaseModel): - `DOMESTIC_PARTNER` - DOMESTIC_PARTNER """ - employee: typing.Optional[str] = pydantic_v1.Field() + employee: typing.Optional[str] = pydantic.Field() """ The employee this person is a dependent of. 
""" - date_of_birth: typing.Optional[dt.datetime] = pydantic_v1.Field() + date_of_birth: typing.Optional[dt.datetime] = pydantic.Field() """ The dependent's date of birth. """ - gender: typing.Optional[DependentGender] = pydantic_v1.Field() + gender: typing.Optional[DependentGender] = pydantic.Field() """ The dependent's gender. @@ -84,27 +85,27 @@ class Dependent(pydantic_v1.BaseModel): - `PREFER_NOT_TO_DISCLOSE` - PREFER_NOT_TO_DISCLOSE """ - phone_number: typing.Optional[str] = pydantic_v1.Field() + phone_number: typing.Optional[str] = pydantic.Field() """ The dependent's phone number. """ - home_location: typing.Optional[str] = pydantic_v1.Field() + home_location: typing.Optional[str] = pydantic.Field() """ The dependents's home address. """ - is_student: typing.Optional[bool] = pydantic_v1.Field() + is_student: typing.Optional[bool] = pydantic.Field() """ Whether or not the dependent is a student """ - ssn: typing.Optional[str] = pydantic_v1.Field() + ssn: typing.Optional[str] = pydantic.Field() """ The dependents's social security number. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -112,20 +113,11 @@ class Dependent(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/earning.py b/src/merge/resources/hris/types/earning.py index 238eba58..df0ca051 100644 --- a/src/merge/resources/hris/types/earning.py +++ b/src/merge/resources/hris/types/earning.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .earning_type import EarningType from .remote_data import RemoteData -class Earning(pydantic_v1.BaseModel): +class Earning(UniversalBaseModel): """ # The Earning Object @@ -23,28 +24,28 @@ class Earning(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. 
""" - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ employee_payroll_run: typing.Optional[str] - amount: typing.Optional[float] = pydantic_v1.Field() + amount: typing.Optional[float] = pydantic.Field() """ The amount earned. """ - type: typing.Optional[EarningType] = pydantic_v1.Field() + type: typing.Optional[EarningType] = pydantic.Field() """ The type of earning. @@ -54,7 +55,7 @@ class Earning(pydantic_v1.BaseModel): - `BONUS` - BONUS """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -62,20 +63,11 @@ class Earning(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/employee.py b/src/merge/resources/hris/types/employee.py index 482742b2..eaef1ee1 100644 --- a/src/merge/resources/hris/types/employee.py +++ b/src/merge/resources/hris/types/employee.py @@ -5,8 +5,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel, update_forward_refs from .employee_company import EmployeeCompany from .employee_employment_status import EmployeeEmploymentStatus from .employee_ethnicity import EmployeeEthnicity @@ -20,7 +21,7 @@ from .remote_data import RemoteData -class Employee(pydantic_v1.BaseModel): +class Employee(UniversalBaseModel): """ # The Employee Object @@ -34,108 +35,108 @@ class Employee(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. 
""" - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - employee_number: typing.Optional[str] = pydantic_v1.Field() + employee_number: typing.Optional[str] = pydantic.Field() """ The employee's number that appears in the third-party integration's UI. """ - company: typing.Optional[EmployeeCompany] = pydantic_v1.Field() + company: typing.Optional[EmployeeCompany] = pydantic.Field() """ The ID of the employee's company. """ - first_name: typing.Optional[str] = pydantic_v1.Field() + first_name: typing.Optional[str] = pydantic.Field() """ The employee's first name. """ - last_name: typing.Optional[str] = pydantic_v1.Field() + last_name: typing.Optional[str] = pydantic.Field() """ The employee's last name. """ - preferred_name: typing.Optional[str] = pydantic_v1.Field() + preferred_name: typing.Optional[str] = pydantic.Field() """ The employee's preferred first name. """ - display_full_name: typing.Optional[str] = pydantic_v1.Field() + display_full_name: typing.Optional[str] = pydantic.Field() """ The employee's full name, to use for display purposes. If a preferred first name is available, the full name will include the preferred first name. """ - username: typing.Optional[str] = pydantic_v1.Field() + username: typing.Optional[str] = pydantic.Field() """ The employee's username that appears in the remote UI. """ groups: typing.Optional[typing.List[typing.Optional[EmployeeGroupsItem]]] - work_email: typing.Optional[str] = pydantic_v1.Field() + work_email: typing.Optional[str] = pydantic.Field() """ The employee's work email. """ - personal_email: typing.Optional[str] = pydantic_v1.Field() + personal_email: typing.Optional[str] = pydantic.Field() """ The employee's personal email. """ - mobile_phone_number: typing.Optional[str] = pydantic_v1.Field() + mobile_phone_number: typing.Optional[str] = pydantic.Field() """ The employee's mobile phone number. """ - employments: typing.Optional[typing.List[typing.Optional[EmployeeEmploymentsItem]]] = pydantic_v1.Field() + employments: typing.Optional[typing.List[typing.Optional[EmployeeEmploymentsItem]]] = pydantic.Field() """ Array of `Employment` IDs for this Employee. """ - home_location: typing.Optional[EmployeeHomeLocation] = pydantic_v1.Field() + home_location: typing.Optional[EmployeeHomeLocation] = pydantic.Field() """ The employee's home address. """ - work_location: typing.Optional[EmployeeWorkLocation] = pydantic_v1.Field() + work_location: typing.Optional[EmployeeWorkLocation] = pydantic.Field() """ The employee's work address. """ - manager: typing.Optional[EmployeeManager] = pydantic_v1.Field() + manager: typing.Optional[EmployeeManager] = pydantic.Field() """ The employee ID of the employee's manager. """ - team: typing.Optional[EmployeeTeam] = pydantic_v1.Field() + team: typing.Optional[EmployeeTeam] = pydantic.Field() """ The employee's team. """ - pay_group: typing.Optional[EmployeePayGroup] = pydantic_v1.Field() + pay_group: typing.Optional[EmployeePayGroup] = pydantic.Field() """ The employee's pay group """ - ssn: typing.Optional[str] = pydantic_v1.Field() + ssn: typing.Optional[str] = pydantic.Field() """ The employee's social security number. """ - gender: typing.Optional[EmployeeGender] = pydantic_v1.Field() + gender: typing.Optional[EmployeeGender] = pydantic.Field() """ The employee's gender. 
@@ -146,7 +147,7 @@ class Employee(pydantic_v1.BaseModel): - `PREFER_NOT_TO_DISCLOSE` - PREFER_NOT_TO_DISCLOSE """ - ethnicity: typing.Optional[EmployeeEthnicity] = pydantic_v1.Field() + ethnicity: typing.Optional[EmployeeEthnicity] = pydantic.Field() """ The employee's ethnicity. @@ -160,7 +161,7 @@ class Employee(pydantic_v1.BaseModel): - `PREFER_NOT_TO_DISCLOSE` - PREFER_NOT_TO_DISCLOSE """ - marital_status: typing.Optional[EmployeeMaritalStatus] = pydantic_v1.Field() + marital_status: typing.Optional[EmployeeMaritalStatus] = pydantic.Field() """ The employee's filing status as related to marital status. @@ -171,27 +172,27 @@ class Employee(pydantic_v1.BaseModel): - `QUALIFYING_WIDOW_OR_WIDOWER_WITH_DEPENDENT_CHILD` - QUALIFYING_WIDOW_OR_WIDOWER_WITH_DEPENDENT_CHILD """ - date_of_birth: typing.Optional[dt.datetime] = pydantic_v1.Field() + date_of_birth: typing.Optional[dt.datetime] = pydantic.Field() """ The employee's date of birth. """ - hire_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + hire_date: typing.Optional[dt.datetime] = pydantic.Field() """ The date that the employee was hired, usually the day that an offer letter is signed. If an employee has multiple hire dates from previous employments, this represents the most recent hire date. Note: If you're looking for the employee's start date, refer to the start_date field. """ - start_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + start_date: typing.Optional[dt.datetime] = pydantic.Field() """ The date that the employee started working. If an employee was rehired, the most recent start date will be returned. """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's employee was created. """ - employment_status: typing.Optional[EmployeeEmploymentStatus] = pydantic_v1.Field() + employment_status: typing.Optional[EmployeeEmploymentStatus] = pydantic.Field() """ The employment status of the employee. @@ -200,17 +201,17 @@ class Employee(pydantic_v1.BaseModel): - `INACTIVE` - INACTIVE """ - termination_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + termination_date: typing.Optional[dt.datetime] = pydantic.Field() """ The employee's termination date. """ - avatar: typing.Optional[str] = pydantic_v1.Field() + avatar: typing.Optional[str] = pydantic.Field() """ The URL of the employee's avatar image. """ - custom_fields: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + custom_fields: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ Custom fields configured for a given model. 
""" @@ -219,26 +220,17 @@ class Employee(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow from .employee_employments_item import EmployeeEmploymentsItem # noqa: E402 from .employee_manager import EmployeeManager # noqa: E402 -Employee.update_forward_refs() +update_forward_refs(Employee) diff --git a/src/merge/resources/hris/types/employee_payroll_run.py b/src/merge/resources/hris/types/employee_payroll_run.py index a070cd9b..06e5902e 100644 --- a/src/merge/resources/hris/types/employee_payroll_run.py +++ b/src/merge/resources/hris/types/employee_payroll_run.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .deduction import Deduction from .earning import Earning from .employee_payroll_run_employee import EmployeePayrollRunEmployee @@ -13,7 +14,7 @@ from .tax import Tax -class EmployeePayrollRun(pydantic_v1.BaseModel): +class EmployeePayrollRun(UniversalBaseModel): """ # The EmployeePayrollRun Object @@ -27,52 +28,52 @@ class EmployeePayrollRun(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - employee: typing.Optional[EmployeePayrollRunEmployee] = pydantic_v1.Field() + employee: typing.Optional[EmployeePayrollRunEmployee] = pydantic.Field() """ The employee whose payroll is being run. """ - payroll_run: typing.Optional[EmployeePayrollRunPayrollRun] = pydantic_v1.Field() + payroll_run: typing.Optional[EmployeePayrollRunPayrollRun] = pydantic.Field() """ The payroll being run. """ - gross_pay: typing.Optional[float] = pydantic_v1.Field() + gross_pay: typing.Optional[float] = pydantic.Field() """ The total earnings throughout a given period for an employee before any deductions are made. 
""" - net_pay: typing.Optional[float] = pydantic_v1.Field() + net_pay: typing.Optional[float] = pydantic.Field() """ The take-home pay throughout a given period for an employee after deductions are made. """ - start_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + start_date: typing.Optional[dt.datetime] = pydantic.Field() """ The day and time the payroll run started. """ - end_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + end_date: typing.Optional[dt.datetime] = pydantic.Field() """ The day and time the payroll run ended. """ - check_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + check_date: typing.Optional[dt.datetime] = pydantic.Field() """ The day and time the payroll run was checked. """ @@ -80,7 +81,7 @@ class EmployeePayrollRun(pydantic_v1.BaseModel): earnings: typing.Optional[typing.List[Earning]] deductions: typing.Optional[typing.List[Deduction]] taxes: typing.Optional[typing.List[Tax]] - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -88,20 +89,11 @@ class EmployeePayrollRun(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/employee_request.py b/src/merge/resources/hris/types/employee_request.py index fa08ade2..d60d6f54 100644 --- a/src/merge/resources/hris/types/employee_request.py +++ b/src/merge/resources/hris/types/employee_request.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .employee_request_company import EmployeeRequestCompany from .employee_request_employment_status import EmployeeRequestEmploymentStatus from .employee_request_employments_item import EmployeeRequestEmploymentsItem @@ -19,7 +20,7 @@ from .employee_request_work_location import EmployeeRequestWorkLocation -class EmployeeRequest(pydantic_v1.BaseModel): +class EmployeeRequest(UniversalBaseModel): """ # The Employee Object @@ -32,93 +33,93 @@ class EmployeeRequest(pydantic_v1.BaseModel): Fetch from the `LIST Employee` endpoint and filter by `ID` to show all employees. 
""" - employee_number: typing.Optional[str] = pydantic_v1.Field() + employee_number: typing.Optional[str] = pydantic.Field() """ The employee's number that appears in the third-party integration's UI. """ - company: typing.Optional[EmployeeRequestCompany] = pydantic_v1.Field() + company: typing.Optional[EmployeeRequestCompany] = pydantic.Field() """ The ID of the employee's company. """ - first_name: typing.Optional[str] = pydantic_v1.Field() + first_name: typing.Optional[str] = pydantic.Field() """ The employee's first name. """ - last_name: typing.Optional[str] = pydantic_v1.Field() + last_name: typing.Optional[str] = pydantic.Field() """ The employee's last name. """ - preferred_name: typing.Optional[str] = pydantic_v1.Field() + preferred_name: typing.Optional[str] = pydantic.Field() """ The employee's preferred first name. """ - display_full_name: typing.Optional[str] = pydantic_v1.Field() + display_full_name: typing.Optional[str] = pydantic.Field() """ The employee's full name, to use for display purposes. If a preferred first name is available, the full name will include the preferred first name. """ - username: typing.Optional[str] = pydantic_v1.Field() + username: typing.Optional[str] = pydantic.Field() """ The employee's username that appears in the remote UI. """ groups: typing.Optional[typing.List[typing.Optional[EmployeeRequestGroupsItem]]] - work_email: typing.Optional[str] = pydantic_v1.Field() + work_email: typing.Optional[str] = pydantic.Field() """ The employee's work email. """ - personal_email: typing.Optional[str] = pydantic_v1.Field() + personal_email: typing.Optional[str] = pydantic.Field() """ The employee's personal email. """ - mobile_phone_number: typing.Optional[str] = pydantic_v1.Field() + mobile_phone_number: typing.Optional[str] = pydantic.Field() """ The employee's mobile phone number. """ - employments: typing.Optional[typing.List[typing.Optional[EmployeeRequestEmploymentsItem]]] = pydantic_v1.Field() + employments: typing.Optional[typing.List[typing.Optional[EmployeeRequestEmploymentsItem]]] = pydantic.Field() """ Array of `Employment` IDs for this Employee. """ - home_location: typing.Optional[EmployeeRequestHomeLocation] = pydantic_v1.Field() + home_location: typing.Optional[EmployeeRequestHomeLocation] = pydantic.Field() """ The employee's home address. """ - work_location: typing.Optional[EmployeeRequestWorkLocation] = pydantic_v1.Field() + work_location: typing.Optional[EmployeeRequestWorkLocation] = pydantic.Field() """ The employee's work address. """ - manager: typing.Optional[EmployeeRequestManager] = pydantic_v1.Field() + manager: typing.Optional[EmployeeRequestManager] = pydantic.Field() """ The employee ID of the employee's manager. """ - team: typing.Optional[EmployeeRequestTeam] = pydantic_v1.Field() + team: typing.Optional[EmployeeRequestTeam] = pydantic.Field() """ The employee's team. """ - pay_group: typing.Optional[EmployeeRequestPayGroup] = pydantic_v1.Field() + pay_group: typing.Optional[EmployeeRequestPayGroup] = pydantic.Field() """ The employee's pay group """ - ssn: typing.Optional[str] = pydantic_v1.Field() + ssn: typing.Optional[str] = pydantic.Field() """ The employee's social security number. """ - gender: typing.Optional[EmployeeRequestGender] = pydantic_v1.Field() + gender: typing.Optional[EmployeeRequestGender] = pydantic.Field() """ The employee's gender. 
@@ -129,7 +130,7 @@ class EmployeeRequest(pydantic_v1.BaseModel): - `PREFER_NOT_TO_DISCLOSE` - PREFER_NOT_TO_DISCLOSE """ - ethnicity: typing.Optional[EmployeeRequestEthnicity] = pydantic_v1.Field() + ethnicity: typing.Optional[EmployeeRequestEthnicity] = pydantic.Field() """ The employee's ethnicity. @@ -143,7 +144,7 @@ class EmployeeRequest(pydantic_v1.BaseModel): - `PREFER_NOT_TO_DISCLOSE` - PREFER_NOT_TO_DISCLOSE """ - marital_status: typing.Optional[EmployeeRequestMaritalStatus] = pydantic_v1.Field() + marital_status: typing.Optional[EmployeeRequestMaritalStatus] = pydantic.Field() """ The employee's filing status as related to marital status. @@ -154,22 +155,22 @@ class EmployeeRequest(pydantic_v1.BaseModel): - `QUALIFYING_WIDOW_OR_WIDOWER_WITH_DEPENDENT_CHILD` - QUALIFYING_WIDOW_OR_WIDOWER_WITH_DEPENDENT_CHILD """ - date_of_birth: typing.Optional[dt.datetime] = pydantic_v1.Field() + date_of_birth: typing.Optional[dt.datetime] = pydantic.Field() """ The employee's date of birth. """ - hire_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + hire_date: typing.Optional[dt.datetime] = pydantic.Field() """ The date that the employee was hired, usually the day that an offer letter is signed. If an employee has multiple hire dates from previous employments, this represents the most recent hire date. Note: If you're looking for the employee's start date, refer to the start_date field. """ - start_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + start_date: typing.Optional[dt.datetime] = pydantic.Field() """ The date that the employee started working. If an employee was rehired, the most recent start date will be returned. """ - employment_status: typing.Optional[EmployeeRequestEmploymentStatus] = pydantic_v1.Field() + employment_status: typing.Optional[EmployeeRequestEmploymentStatus] = pydantic.Field() """ The employment status of the employee. @@ -178,12 +179,12 @@ class EmployeeRequest(pydantic_v1.BaseModel): - `INACTIVE` - INACTIVE """ - termination_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + termination_date: typing.Optional[dt.datetime] = pydantic.Field() """ The employee's termination date. """ - avatar: typing.Optional[str] = pydantic_v1.Field() + avatar: typing.Optional[str] = pydantic.Field() """ The URL of the employee's avatar image. 
""" @@ -191,20 +192,11 @@ class EmployeeRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/employee_response.py b/src/merge/resources/hris/types/employee_response.py index 7a01fca3..431b4f95 100644 --- a/src/merge/resources/hris/types/employee_response.py +++ b/src/merge/resources/hris/types/employee_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .employee import Employee from .error_validation_problem import ErrorValidationProblem from .warning_validation_problem import WarningValidationProblem -class EmployeeResponse(pydantic_v1.BaseModel): +class EmployeeResponse(UniversalBaseModel): model: Employee warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/employer_benefit.py b/src/merge/resources/hris/types/employer_benefit.py index 5806a90a..ec617dc5 100644 --- a/src/merge/resources/hris/types/employer_benefit.py +++ b/src/merge/resources/hris/types/employer_benefit.py @@ -3,12 +3,13 @@ import datetime as dt 
import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .employer_benefit_benefit_plan_type import EmployerBenefitBenefitPlanType -class EmployerBenefit(pydantic_v1.BaseModel): +class EmployerBenefit(UniversalBaseModel): """ # The EmployerBenefit Object @@ -22,22 +23,22 @@ class EmployerBenefit(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - benefit_plan_type: typing.Optional[EmployerBenefitBenefitPlanType] = pydantic_v1.Field() + benefit_plan_type: typing.Optional[EmployerBenefitBenefitPlanType] = pydantic.Field() """ The type of benefit plan. @@ -48,22 +49,22 @@ class EmployerBenefit(pydantic_v1.BaseModel): - `OTHER` - OTHER """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The employer benefit's name - typically the carrier or network name. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The employer benefit's description. """ - deduction_code: typing.Optional[str] = pydantic_v1.Field() + deduction_code: typing.Optional[str] = pydantic.Field() """ The employer benefit's deduction code. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -71,20 +72,11 @@ class EmployerBenefit(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[typing.Optional[typing.Dict[str, typing.Any]]]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/employment.py b/src/merge/resources/hris/types/employment.py index 40e9b99b..c65fbbc9 100644 --- a/src/merge/resources/hris/types/employment.py +++ b/src/merge/resources/hris/types/employment.py @@ -5,8 +5,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel, update_forward_refs from .employment_employment_type import EmploymentEmploymentType from .employment_flsa_status import EmploymentFlsaStatus from .employment_pay_currency import EmploymentPayCurrency @@ -16,7 +17,7 @@ from .remote_data import RemoteData -class Employment(pydantic_v1.BaseModel): +class Employment(UniversalBaseModel): """ # The Employment Object @@ -32,37 +33,37 @@ class Employment(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - employee: typing.Optional[EmploymentEmployee] = pydantic_v1.Field() + employee: typing.Optional[EmploymentEmployee] = pydantic.Field() """ The employee holding this position. """ - job_title: typing.Optional[str] = pydantic_v1.Field() + job_title: typing.Optional[str] = pydantic.Field() """ The position's title. """ - pay_rate: typing.Optional[float] = pydantic_v1.Field() + pay_rate: typing.Optional[float] = pydantic.Field() """ The position's pay rate in dollars. """ - pay_period: typing.Optional[EmploymentPayPeriod] = pydantic_v1.Field() + pay_period: typing.Optional[EmploymentPayPeriod] = pydantic.Field() """ The time period this pay rate encompasses. 
@@ -77,7 +78,7 @@ class Employment(pydantic_v1.BaseModel): - `YEAR` - YEAR """ - pay_frequency: typing.Optional[EmploymentPayFrequency] = pydantic_v1.Field() + pay_frequency: typing.Optional[EmploymentPayFrequency] = pydantic.Field() """ The position's pay frequency. @@ -92,7 +93,7 @@ class Employment(pydantic_v1.BaseModel): - `SEMIMONTHLY` - SEMIMONTHLY """ - pay_currency: typing.Optional[EmploymentPayCurrency] = pydantic_v1.Field() + pay_currency: typing.Optional[EmploymentPayCurrency] = pydantic.Field() """ The position's currency code. @@ -404,12 +405,12 @@ class Employment(pydantic_v1.BaseModel): - `ZWL` - Zimbabwean Dollar (2009) """ - pay_group: typing.Optional[EmploymentPayGroup] = pydantic_v1.Field() + pay_group: typing.Optional[EmploymentPayGroup] = pydantic.Field() """ The employment's pay group """ - flsa_status: typing.Optional[EmploymentFlsaStatus] = pydantic_v1.Field() + flsa_status: typing.Optional[EmploymentFlsaStatus] = pydantic.Field() """ The position's FLSA status. @@ -419,12 +420,12 @@ class Employment(pydantic_v1.BaseModel): - `OWNER` - OWNER """ - effective_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + effective_date: typing.Optional[dt.datetime] = pydantic.Field() """ The position's effective date. """ - employment_type: typing.Optional[EmploymentEmploymentType] = pydantic_v1.Field() + employment_type: typing.Optional[EmploymentEmploymentType] = pydantic.Field() """ The position's type of employment. @@ -435,7 +436,7 @@ class Employment(pydantic_v1.BaseModel): - `FREELANCE` - FREELANCE """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -443,25 +444,16 @@ class Employment(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow from .employment_employee import EmploymentEmployee # noqa: E402 -Employment.update_forward_refs() +update_forward_refs(Employment) diff --git a/src/merge/resources/hris/types/error_validation_problem.py b/src/merge/resources/hris/types/error_validation_problem.py index 425af45c..3838491d 100644 --- a/src/merge/resources/hris/types/error_validation_problem.py +++ b/src/merge/resources/hris/types/error_validation_problem.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .validation_problem_source import ValidationProblemSource -class ErrorValidationProblem(pydantic_v1.BaseModel): +class ErrorValidationProblem(UniversalBaseModel): source: typing.Optional[ValidationProblemSource] title: str detail: str problem_type: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/external_target_field_api.py b/src/merge/resources/hris/types/external_target_field_api.py index a97d536a..8a971c64 100644 --- a/src/merge/resources/hris/types/external_target_field_api.py +++ b/src/merge/resources/hris/types/external_target_field_api.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
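# The hunks above (EmployerBenefit, Employment, ErrorValidationProblem, and the
# files that follow) all replace per-model json()/dict() overrides with one
# conditional config. Below is a minimal stand-alone sketch of that pattern on a
# plain pydantic.BaseModel, independent of the SDK's UniversalBaseModel (whose
# definition lives elsewhere in this PR); IS_PYDANTIC_V2 is recomputed locally
# here purely for illustration.
import typing

import pydantic

IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")


class DualConfigSketch(pydantic.BaseModel):
    title: typing.Optional[str] = None

    if IS_PYDANTIC_V2:
        # Pydantic v2 path: configuration is a ConfigDict assigned to model_config.
        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(
            extra="allow", frozen=True
        )  # type: ignore  # Pydantic v2
    else:
        # Pydantic v1 path: the legacy inner Config class, as in the else branches above.
        class Config:
            frozen = True
            smart_union = True
            extra = pydantic.Extra.allow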
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ExternalTargetFieldApi(pydantic_v1.BaseModel): + +class ExternalTargetFieldApi(UniversalBaseModel): name: typing.Optional[str] description: typing.Optional[str] is_mapped: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/external_target_field_api_response.py b/src/merge/resources/hris/types/external_target_field_api_response.py index f85881d7..79b4a1e8 100644 --- a/src/merge/resources/hris/types/external_target_field_api_response.py +++ b/src/merge/resources/hris/types/external_target_field_api_response.py @@ -1,49 +1,38 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .external_target_field_api import ExternalTargetFieldApi -class ExternalTargetFieldApiResponse(pydantic_v1.BaseModel): - benefit: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Benefit") - employer_benefit: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="EmployerBenefit") - company: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Company") - employee_payroll_run: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field( +class ExternalTargetFieldApiResponse(UniversalBaseModel): + benefit: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Benefit") + employer_benefit: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="EmployerBenefit") + company: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Company") + employee_payroll_run: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field( alias="EmployeePayrollRun" ) - employee: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Employee") - employment: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Employment") - location: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Location") - payroll_run: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="PayrollRun") - team: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Team") - time_off: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="TimeOff") - time_off_balance: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="TimeOffBalance") - bank_info: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="BankInfo") - pay_group: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="PayGroup") - group: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Group") - dependent: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Dependent") - timesheet_entry: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="TimesheetEntry") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + employee: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Employee") + employment: typing.Optional[typing.List[ExternalTargetFieldApi]] = 
pydantic.Field(alias="Employment") + location: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Location") + payroll_run: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="PayrollRun") + team: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Team") + time_off: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="TimeOff") + time_off_balance: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="TimeOffBalance") + bank_info: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="BankInfo") + pay_group: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="PayGroup") + group: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Group") + dependent: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Dependent") + timesheet_entry: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="TimesheetEntry") + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/field_mapping_api_instance.py b/src/merge/resources/hris/types/field_mapping_api_instance.py index d9d7670d..8af85a52 100644 --- a/src/merge/resources/hris/types/field_mapping_api_instance.py +++ b/src/merge/resources/hris/types/field_mapping_api_instance.py @@ -1,34 +1,25 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_mapping_api_instance_remote_field import FieldMappingApiInstanceRemoteField from .field_mapping_api_instance_target_field import FieldMappingApiInstanceTargetField -class FieldMappingApiInstance(pydantic_v1.BaseModel): +class FieldMappingApiInstance(UniversalBaseModel): id: typing.Optional[str] is_integration_wide: typing.Optional[bool] target_field: typing.Optional[FieldMappingApiInstanceTargetField] remote_field: typing.Optional[FieldMappingApiInstanceRemoteField] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/field_mapping_api_instance_remote_field.py 
b/src/merge/resources/hris/types/field_mapping_api_instance_remote_field.py index 9539d3dc..0635b76f 100644 --- a/src/merge/resources/hris/types/field_mapping_api_instance_remote_field.py +++ b/src/merge/resources/hris/types/field_mapping_api_instance_remote_field.py @@ -1,36 +1,25 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_mapping_api_instance_remote_field_remote_endpoint_info import ( FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo, ) -class FieldMappingApiInstanceRemoteField(pydantic_v1.BaseModel): +class FieldMappingApiInstanceRemoteField(UniversalBaseModel): remote_key_name: str - schema_: typing.Dict[str, typing.Any] = pydantic_v1.Field(alias="schema") + schema_: typing.Dict[str, typing.Any] = pydantic.Field(alias="schema") remote_endpoint_info: FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py b/src/merge/resources/hris/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py index d9fcc276..e34eb6e4 100644 --- a/src/merge/resources/hris/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py +++ b/src/merge/resources/hris/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
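# The models above lean on field aliases: ExternalTargetFieldApiResponse keys
# its lists by PascalCase common-model names such as "EmployerBenefit", and
# FieldMappingApiInstanceRemoteField surfaces the reserved name "schema" as
# schema_. Below is a stand-alone sketch of how such aliased fields round-trip,
# written against the plain pydantic v2 API for brevity rather than the
# generated SDK models themselves.
import typing

import pydantic


class AliasSketch(pydantic.BaseModel):
    employer_benefit: typing.Optional[typing.List[str]] = pydantic.Field(default=None, alias="EmployerBenefit")
    schema_: typing.Dict[str, typing.Any] = pydantic.Field(alias="schema")


payload = {"EmployerBenefit": ["external-field-a"], "schema": {"type": "object"}}
parsed = AliasSketch.model_validate(payload)    # populated through the alias keys
print(parsed.employer_benefit, parsed.schema_)  # read back through the Python attribute names
print(parsed.model_dump(by_alias=True))         # dumped back under the alias keys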
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo(pydantic_v1.BaseModel): + +class FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo(UniversalBaseModel): method: typing.Optional[str] url_path: typing.Optional[str] field_traversal_path: typing.Optional[typing.List[str]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/field_mapping_api_instance_response.py b/src/merge/resources/hris/types/field_mapping_api_instance_response.py index 05a8eefc..8250cf10 100644 --- a/src/merge/resources/hris/types/field_mapping_api_instance_response.py +++ b/src/merge/resources/hris/types/field_mapping_api_instance_response.py @@ -1,49 +1,38 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_mapping_api_instance import FieldMappingApiInstance -class FieldMappingApiInstanceResponse(pydantic_v1.BaseModel): - benefit: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Benefit") - employer_benefit: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="EmployerBenefit") - company: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Company") - employee_payroll_run: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field( +class FieldMappingApiInstanceResponse(UniversalBaseModel): + benefit: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Benefit") + employer_benefit: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="EmployerBenefit") + company: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Company") + employee_payroll_run: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field( alias="EmployeePayrollRun" ) - employee: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Employee") - employment: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Employment") - location: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Location") - payroll_run: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="PayrollRun") - team: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Team") - time_off: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="TimeOff") - time_off_balance: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="TimeOffBalance") - bank_info: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="BankInfo") - pay_group: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="PayGroup") - group: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Group") - dependent: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Dependent") - timesheet_entry: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="TimesheetEntry") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + employee: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Employee") + employment: 
typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Employment") + location: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Location") + payroll_run: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="PayrollRun") + team: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Team") + time_off: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="TimeOff") + time_off_balance: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="TimeOffBalance") + bank_info: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="BankInfo") + pay_group: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="PayGroup") + group: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Group") + dependent: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Dependent") + timesheet_entry: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="TimesheetEntry") + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/field_mapping_api_instance_target_field.py b/src/merge/resources/hris/types/field_mapping_api_instance_target_field.py index 25a8dcff..c590d4ce 100644 --- a/src/merge/resources/hris/types/field_mapping_api_instance_target_field.py +++ b/src/merge/resources/hris/types/field_mapping_api_instance_target_field.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldMappingApiInstanceTargetField(pydantic_v1.BaseModel): + +class FieldMappingApiInstanceTargetField(UniversalBaseModel): name: str description: str is_organization_wide: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/field_mapping_instance_response.py b/src/merge/resources/hris/types/field_mapping_instance_response.py index b55d2c40..aaf06f0e 100644 --- a/src/merge/resources/hris/types/field_mapping_instance_response.py +++ b/src/merge/resources/hris/types/field_mapping_instance_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .field_mapping_api_instance import FieldMappingApiInstance from .warning_validation_problem import WarningValidationProblem -class FieldMappingInstanceResponse(pydantic_v1.BaseModel): +class FieldMappingInstanceResponse(UniversalBaseModel): model: FieldMappingApiInstance warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/field_permission_deserializer.py b/src/merge/resources/hris/types/field_permission_deserializer.py index 124f3deb..ed80b9d6 100644 --- a/src/merge/resources/hris/types/field_permission_deserializer.py +++ b/src/merge/resources/hris/types/field_permission_deserializer.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
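# Every dict() override deleted in these hunks produced two dumps of the same
# model, one with exclude_unset=True and one with exclude_none=True, then merged
# them through the likewise-removed deep_union_pydantic_dicts helper. That
# helper's body is not part of this diff; the recursive merge below is only an
# illustrative stand-in for such a union, not the SDK's implementation.
import typing


def deep_union_sketch(
    base: typing.Dict[str, typing.Any], overlay: typing.Dict[str, typing.Any]
) -> typing.Dict[str, typing.Any]:
    """Union two dicts, recursing into values that are dicts on both sides."""
    merged = dict(base)
    for key, value in overlay.items():
        if isinstance(merged.get(key), dict) and isinstance(value, dict):
            merged[key] = deep_union_sketch(merged[key], value)
        else:
            merged[key] = value
    return merged


# Roughly what the removed override computed:
#   deep_union_sketch(model.dict(by_alias=True, exclude_unset=True),
#                     model.dict(by_alias=True, exclude_none=True))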
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldPermissionDeserializer(pydantic_v1.BaseModel): + +class FieldPermissionDeserializer(UniversalBaseModel): enabled: typing.Optional[typing.List[typing.Any]] disabled: typing.Optional[typing.List[typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/field_permission_deserializer_request.py b/src/merge/resources/hris/types/field_permission_deserializer_request.py index 65e80e75..e937e743 100644 --- a/src/merge/resources/hris/types/field_permission_deserializer_request.py +++ b/src/merge/resources/hris/types/field_permission_deserializer_request.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldPermissionDeserializerRequest(pydantic_v1.BaseModel): + +class FieldPermissionDeserializerRequest(UniversalBaseModel): enabled: typing.Optional[typing.List[typing.Any]] disabled: typing.Optional[typing.List[typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/group.py b/src/merge/resources/hris/types/group.py index 06fdfff1..366124c3 100644 --- a/src/merge/resources/hris/types/group.py +++ b/src/merge/resources/hris/types/group.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .group_type import GroupType from .remote_data import RemoteData -class Group(pydantic_v1.BaseModel): +class Group(UniversalBaseModel): """ # The Group Object @@ -23,32 +24,32 @@ class Group(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - parent_group: typing.Optional[str] = pydantic_v1.Field() + parent_group: typing.Optional[str] = pydantic.Field() """ The parent group for this group. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The group name. """ - type: typing.Optional[GroupType] = pydantic_v1.Field() + type: typing.Optional[GroupType] = pydantic.Field() """ The Group type returned directly from the third-party. @@ -59,12 +60,12 @@ class Group(pydantic_v1.BaseModel): - `GROUP` - GROUP """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" - is_commonly_used_as_team: typing.Optional[bool] = pydantic_v1.Field() + is_commonly_used_as_team: typing.Optional[bool] = pydantic.Field() """ Indicates whether the Group refers to a team in the third party platform. Note that this is an opinionated view based on how Merge observes most organizations representing teams in each third party platform. If your customer uses a platform different from most, there is a chance this will not be correct. """ @@ -72,20 +73,11 @@ class Group(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/individual_common_model_scope_deserializer.py b/src/merge/resources/hris/types/individual_common_model_scope_deserializer.py index d80ca06e..ffa55055 100644 --- a/src/merge/resources/hris/types/individual_common_model_scope_deserializer.py +++ b/src/merge/resources/hris/types/individual_common_model_scope_deserializer.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_permission_deserializer import FieldPermissionDeserializer from .model_permission_deserializer import ModelPermissionDeserializer -class IndividualCommonModelScopeDeserializer(pydantic_v1.BaseModel): +class IndividualCommonModelScopeDeserializer(UniversalBaseModel): model_name: str model_permissions: typing.Optional[typing.Dict[str, ModelPermissionDeserializer]] field_permissions: typing.Optional[FieldPermissionDeserializer] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/individual_common_model_scope_deserializer_request.py b/src/merge/resources/hris/types/individual_common_model_scope_deserializer_request.py index 8f2e7de5..d0e68f6d 100644 --- a/src/merge/resources/hris/types/individual_common_model_scope_deserializer_request.py +++ b/src/merge/resources/hris/types/individual_common_model_scope_deserializer_request.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_permission_deserializer_request import FieldPermissionDeserializerRequest from .model_permission_deserializer_request import ModelPermissionDeserializerRequest -class IndividualCommonModelScopeDeserializerRequest(pydantic_v1.BaseModel): +class IndividualCommonModelScopeDeserializerRequest(UniversalBaseModel): model_name: str model_permissions: typing.Optional[typing.Dict[str, ModelPermissionDeserializerRequest]] field_permissions: typing.Optional[FieldPermissionDeserializerRequest] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/issue.py b/src/merge/resources/hris/types/issue.py index 086a0db2..28366731 100644 --- a/src/merge/resources/hris/types/issue.py +++ b/src/merge/resources/hris/types/issue.py @@ -3,14 +3,15 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .issue_status import IssueStatus -class Issue(pydantic_v1.BaseModel): +class Issue(UniversalBaseModel): id: typing.Optional[str] - status: typing.Optional[IssueStatus] = pydantic_v1.Field() + status: typing.Optional[IssueStatus] = pydantic.Field() """ Status of the issue. 
Options: ('ONGOING', 'RESOLVED') @@ -25,20 +26,11 @@ class Issue(pydantic_v1.BaseModel): is_muted: typing.Optional[bool] error_details: typing.Optional[typing.List[str]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/link_token.py b/src/merge/resources/hris/types/link_token.py index 1c82d1ac..87c88faf 100644 --- a/src/merge/resources/hris/types/link_token.py +++ b/src/merge/resources/hris/types/link_token.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class LinkToken(pydantic_v1.BaseModel): + +class LinkToken(UniversalBaseModel): link_token: str integration_name: typing.Optional[str] magic_link_url: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/linked_account_status.py b/src/merge/resources/hris/types/linked_account_status.py index 60e21a98..34184012 100644 --- a/src/merge/resources/hris/types/linked_account_status.py +++ b/src/merge/resources/hris/types/linked_account_status.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
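# Both branches of the new config keep these models frozen while still accepting
# fields the API may return that the SDK does not model (extra="allow"). Below
# is a sketch of that behavior using plain pydantic v2 and a
# LinkedAccountStatus-like shape; the generated models get the same settings
# through UniversalBaseModel.
import pydantic


class FrozenSketch(pydantic.BaseModel):
    model_config = pydantic.ConfigDict(extra="allow", frozen=True)

    linked_account_status: str
    can_make_request: bool


status = FrozenSketch(linked_account_status="COMPLETE", can_make_request=True, undocumented_flag=1)
print(status.model_extra)  # {'undocumented_flag': 1}: unknown keys are preserved
try:
    status.can_make_request = False
except pydantic.ValidationError:
    print("frozen models reject attribute assignment")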
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class LinkedAccountStatus(pydantic_v1.BaseModel): + +class LinkedAccountStatus(UniversalBaseModel): linked_account_status: str can_make_request: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/location.py b/src/merge/resources/hris/types/location.py index 4d0bf55a..15b124d2 100644 --- a/src/merge/resources/hris/types/location.py +++ b/src/merge/resources/hris/types/location.py @@ -3,14 +3,15 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .location_country import LocationCountry from .location_location_type import LocationLocationType from .remote_data import RemoteData -class Location(pydantic_v1.BaseModel): +class Location(UniversalBaseModel): """ # The Location Object @@ -24,57 +25,57 @@ class Location(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The location's name. """ - phone_number: typing.Optional[str] = pydantic_v1.Field() + phone_number: typing.Optional[str] = pydantic.Field() """ The location's phone number. """ - street_1: typing.Optional[str] = pydantic_v1.Field() + street_1: typing.Optional[str] = pydantic.Field() """ Line 1 of the location's street address. """ - street_2: typing.Optional[str] = pydantic_v1.Field() + street_2: typing.Optional[str] = pydantic.Field() """ Line 2 of the location's street address. """ - city: typing.Optional[str] = pydantic_v1.Field() + city: typing.Optional[str] = pydantic.Field() """ The location's city. 
""" - state: typing.Optional[str] = pydantic_v1.Field() + state: typing.Optional[str] = pydantic.Field() """ The location's state. Represents a region if outside of the US. """ - zip_code: typing.Optional[str] = pydantic_v1.Field() + zip_code: typing.Optional[str] = pydantic.Field() """ The location's zip code or postal code. """ - country: typing.Optional[LocationCountry] = pydantic_v1.Field() + country: typing.Optional[LocationCountry] = pydantic.Field() """ The location's country. @@ -329,7 +330,7 @@ class Location(pydantic_v1.BaseModel): - `ZW` - Zimbabwe """ - location_type: typing.Optional[LocationLocationType] = pydantic_v1.Field() + location_type: typing.Optional[LocationLocationType] = pydantic.Field() """ The location's type. Can be either WORK or HOME @@ -337,7 +338,7 @@ class Location(pydantic_v1.BaseModel): - `WORK` - WORK """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -345,20 +346,11 @@ class Location(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/meta_response.py b/src/merge/resources/hris/types/meta_response.py index debaf4ef..27e02126 100644 --- a/src/merge/resources/hris/types/meta_response.py +++ b/src/merge/resources/hris/types/meta_response.py @@ -1,34 +1,25 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .linked_account_status import LinkedAccountStatus -class MetaResponse(pydantic_v1.BaseModel): +class MetaResponse(UniversalBaseModel): request_schema: typing.Dict[str, typing.Any] remote_field_classes: typing.Optional[typing.Dict[str, typing.Any]] status: typing.Optional[LinkedAccountStatus] has_conditional_params: bool has_required_linked_account_params: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/model_operation.py b/src/merge/resources/hris/types/model_operation.py index 0f4429ec..efe8355e 100644 --- a/src/merge/resources/hris/types/model_operation.py +++ b/src/merge/resources/hris/types/model_operation.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ModelOperation(pydantic_v1.BaseModel): + +class ModelOperation(UniversalBaseModel): """ # The ModelOperation Object @@ -25,20 +25,11 @@ class ModelOperation(pydantic_v1.BaseModel): required_post_parameters: typing.List[str] supported_fields: typing.List[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/model_permission_deserializer.py b/src/merge/resources/hris/types/model_permission_deserializer.py index 5a6adf20..14bc4f99 100644 --- a/src/merge/resources/hris/types/model_permission_deserializer.py +++ b/src/merge/resources/hris/types/model_permission_deserializer.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ModelPermissionDeserializer(pydantic_v1.BaseModel): - is_enabled: typing.Optional[bool] - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class ModelPermissionDeserializer(UniversalBaseModel): + is_enabled: typing.Optional[bool] - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/model_permission_deserializer_request.py b/src/merge/resources/hris/types/model_permission_deserializer_request.py index 3f72b9ac..cc2e7f77 100644 --- a/src/merge/resources/hris/types/model_permission_deserializer_request.py +++ b/src/merge/resources/hris/types/model_permission_deserializer_request.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ModelPermissionDeserializerRequest(pydantic_v1.BaseModel): - is_enabled: typing.Optional[bool] - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class ModelPermissionDeserializerRequest(UniversalBaseModel): + is_enabled: typing.Optional[bool] - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/multipart_form_field_request.py b/src/merge/resources/hris/types/multipart_form_field_request.py index 9c8ffb21..b6a6c708 100644 --- a/src/merge/resources/hris/types/multipart_form_field_request.py +++ b/src/merge/resources/hris/types/multipart_form_field_request.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .multipart_form_field_request_encoding import MultipartFormFieldRequestEncoding -class MultipartFormFieldRequest(pydantic_v1.BaseModel): +class MultipartFormFieldRequest(UniversalBaseModel): """ # The MultipartFormField Object @@ -21,17 +21,17 @@ class MultipartFormFieldRequest(pydantic_v1.BaseModel): Create a `MultipartFormField` to define a multipart form entry. """ - name: str = pydantic_v1.Field() + name: str = pydantic.Field() """ The name of the form field """ - data: str = pydantic_v1.Field() + data: str = pydantic.Field() """ The data for the form field. """ - encoding: typing.Optional[MultipartFormFieldRequestEncoding] = pydantic_v1.Field() + encoding: typing.Optional[MultipartFormFieldRequestEncoding] = pydantic.Field() """ The encoding of the value of `data`. Defaults to `RAW` if not defined. @@ -40,30 +40,21 @@ class MultipartFormFieldRequest(pydantic_v1.BaseModel): - `GZIP_BASE64` - GZIP_BASE64 """ - file_name: typing.Optional[str] = pydantic_v1.Field() + file_name: typing.Optional[str] = pydantic.Field() """ The file name of the form field, if the field is for a file. """ - content_type: typing.Optional[str] = pydantic_v1.Field() + content_type: typing.Optional[str] = pydantic.Field() """ The MIME type of the file, if the field is for a file. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_account_details_and_actions_list.py b/src/merge/resources/hris/types/paginated_account_details_and_actions_list.py index 280100c4..07323330 100644 --- a/src/merge/resources/hris/types/paginated_account_details_and_actions_list.py +++ b/src/merge/resources/hris/types/paginated_account_details_and_actions_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_details_and_actions import AccountDetailsAndActions -class PaginatedAccountDetailsAndActionsList(pydantic_v1.BaseModel): +class PaginatedAccountDetailsAndActionsList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[AccountDetailsAndActions]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_audit_log_event_list.py b/src/merge/resources/hris/types/paginated_audit_log_event_list.py index 1d4154d2..e5e04fa7 100644 --- a/src/merge/resources/hris/types/paginated_audit_log_event_list.py +++ b/src/merge/resources/hris/types/paginated_audit_log_event_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .audit_log_event import AuditLogEvent -class PaginatedAuditLogEventList(pydantic_v1.BaseModel): +class PaginatedAuditLogEventList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[AuditLogEvent]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_bank_info_list.py b/src/merge/resources/hris/types/paginated_bank_info_list.py index cda1f698..dbf3fae9 100644 --- a/src/merge/resources/hris/types/paginated_bank_info_list.py +++ b/src/merge/resources/hris/types/paginated_bank_info_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .bank_info import BankInfo -class PaginatedBankInfoList(pydantic_v1.BaseModel): +class PaginatedBankInfoList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[BankInfo]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_benefit_list.py b/src/merge/resources/hris/types/paginated_benefit_list.py index a137531e..d095d5fa 100644 --- a/src/merge/resources/hris/types/paginated_benefit_list.py +++ b/src/merge/resources/hris/types/paginated_benefit_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .benefit import Benefit -class PaginatedBenefitList(pydantic_v1.BaseModel): +class PaginatedBenefitList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Benefit]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_company_list.py b/src/merge/resources/hris/types/paginated_company_list.py index d48ead65..d4bf7e06 100644 --- a/src/merge/resources/hris/types/paginated_company_list.py +++ b/src/merge/resources/hris/types/paginated_company_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .company import Company -class PaginatedCompanyList(pydantic_v1.BaseModel): +class PaginatedCompanyList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Company]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_dependent_list.py b/src/merge/resources/hris/types/paginated_dependent_list.py index a4c05c69..63994aef 100644 --- a/src/merge/resources/hris/types/paginated_dependent_list.py +++ b/src/merge/resources/hris/types/paginated_dependent_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .dependent import Dependent -class PaginatedDependentList(pydantic_v1.BaseModel): +class PaginatedDependentList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Dependent]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_employee_list.py b/src/merge/resources/hris/types/paginated_employee_list.py index 92eb0281..508d12b5 100644 --- a/src/merge/resources/hris/types/paginated_employee_list.py +++ b/src/merge/resources/hris/types/paginated_employee_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .employee import Employee -class PaginatedEmployeeList(pydantic_v1.BaseModel): +class PaginatedEmployeeList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Employee]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_employee_payroll_run_list.py b/src/merge/resources/hris/types/paginated_employee_payroll_run_list.py index dcabd04e..27cd83e9 100644 --- a/src/merge/resources/hris/types/paginated_employee_payroll_run_list.py +++ b/src/merge/resources/hris/types/paginated_employee_payroll_run_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .employee_payroll_run import EmployeePayrollRun -class PaginatedEmployeePayrollRunList(pydantic_v1.BaseModel): +class PaginatedEmployeePayrollRunList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[EmployeePayrollRun]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_employer_benefit_list.py b/src/merge/resources/hris/types/paginated_employer_benefit_list.py index 58a8447d..d1e81730 100644 --- a/src/merge/resources/hris/types/paginated_employer_benefit_list.py +++ b/src/merge/resources/hris/types/paginated_employer_benefit_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .employer_benefit import EmployerBenefit -class PaginatedEmployerBenefitList(pydantic_v1.BaseModel): +class PaginatedEmployerBenefitList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[EmployerBenefit]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_employment_list.py b/src/merge/resources/hris/types/paginated_employment_list.py index 53722dce..c4dbeb6c 100644 --- a/src/merge/resources/hris/types/paginated_employment_list.py +++ b/src/merge/resources/hris/types/paginated_employment_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .employment import Employment -class PaginatedEmploymentList(pydantic_v1.BaseModel): +class PaginatedEmploymentList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Employment]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_group_list.py b/src/merge/resources/hris/types/paginated_group_list.py index 723ff3ca..16c73fad 100644 --- a/src/merge/resources/hris/types/paginated_group_list.py +++ b/src/merge/resources/hris/types/paginated_group_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .group import Group -class PaginatedGroupList(pydantic_v1.BaseModel): +class PaginatedGroupList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Group]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_issue_list.py b/src/merge/resources/hris/types/paginated_issue_list.py index 1016e29a..da8437f1 100644 --- a/src/merge/resources/hris/types/paginated_issue_list.py +++ b/src/merge/resources/hris/types/paginated_issue_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .issue import Issue -class PaginatedIssueList(pydantic_v1.BaseModel): +class PaginatedIssueList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Issue]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_location_list.py b/src/merge/resources/hris/types/paginated_location_list.py index 2b8a7794..8a2112dd 100644 --- a/src/merge/resources/hris/types/paginated_location_list.py +++ b/src/merge/resources/hris/types/paginated_location_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .location import Location -class PaginatedLocationList(pydantic_v1.BaseModel): +class PaginatedLocationList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Location]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_pay_group_list.py b/src/merge/resources/hris/types/paginated_pay_group_list.py index 714f10c3..5a7fc513 100644 --- a/src/merge/resources/hris/types/paginated_pay_group_list.py +++ b/src/merge/resources/hris/types/paginated_pay_group_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .pay_group import PayGroup -class PaginatedPayGroupList(pydantic_v1.BaseModel): +class PaginatedPayGroupList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[PayGroup]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_payroll_run_list.py b/src/merge/resources/hris/types/paginated_payroll_run_list.py index fb4e7389..baeec141 100644 --- a/src/merge/resources/hris/types/paginated_payroll_run_list.py +++ b/src/merge/resources/hris/types/paginated_payroll_run_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .payroll_run import PayrollRun -class PaginatedPayrollRunList(pydantic_v1.BaseModel): +class PaginatedPayrollRunList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[PayrollRun]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_sync_status_list.py b/src/merge/resources/hris/types/paginated_sync_status_list.py index 6c88197e..7faca80c 100644 --- a/src/merge/resources/hris/types/paginated_sync_status_list.py +++ b/src/merge/resources/hris/types/paginated_sync_status_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .sync_status import SyncStatus -class PaginatedSyncStatusList(pydantic_v1.BaseModel): +class PaginatedSyncStatusList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[SyncStatus]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_team_list.py b/src/merge/resources/hris/types/paginated_team_list.py index fdb9e669..5d227c70 100644 --- a/src/merge/resources/hris/types/paginated_team_list.py +++ b/src/merge/resources/hris/types/paginated_team_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .team import Team -class PaginatedTeamList(pydantic_v1.BaseModel): +class PaginatedTeamList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Team]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_time_off_balance_list.py b/src/merge/resources/hris/types/paginated_time_off_balance_list.py index 3bf9d969..fa14f273 100644 --- a/src/merge/resources/hris/types/paginated_time_off_balance_list.py +++ b/src/merge/resources/hris/types/paginated_time_off_balance_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .time_off_balance import TimeOffBalance -class PaginatedTimeOffBalanceList(pydantic_v1.BaseModel): +class PaginatedTimeOffBalanceList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[TimeOffBalance]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_time_off_list.py b/src/merge/resources/hris/types/paginated_time_off_list.py index 4d3fbae2..17ccfaad 100644 --- a/src/merge/resources/hris/types/paginated_time_off_list.py +++ b/src/merge/resources/hris/types/paginated_time_off_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .time_off import TimeOff -class PaginatedTimeOffList(pydantic_v1.BaseModel): +class PaginatedTimeOffList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[TimeOff]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/paginated_timesheet_entry_list.py b/src/merge/resources/hris/types/paginated_timesheet_entry_list.py index 51039538..64032b69 100644 --- a/src/merge/resources/hris/types/paginated_timesheet_entry_list.py +++ b/src/merge/resources/hris/types/paginated_timesheet_entry_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .timesheet_entry import TimesheetEntry -class PaginatedTimesheetEntryList(pydantic_v1.BaseModel): +class PaginatedTimesheetEntryList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[TimesheetEntry]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/pay_group.py b/src/merge/resources/hris/types/pay_group.py index 7bfd5af9..6d93296b 100644 --- a/src/merge/resources/hris/types/pay_group.py +++ b/src/merge/resources/hris/types/pay_group.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData -class PayGroup(pydantic_v1.BaseModel): +class PayGroup(UniversalBaseModel): """ # The PayGroup Object @@ -22,27 +23,27 @@ class PayGroup(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - pay_group_name: typing.Optional[str] = pydantic_v1.Field() + pay_group_name: typing.Optional[str] = pydantic.Field() """ The pay group name. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -50,20 +51,11 @@ class PayGroup(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/payroll_run.py b/src/merge/resources/hris/types/payroll_run.py index b33772e5..5e728904 100644 --- a/src/merge/resources/hris/types/payroll_run.py +++ b/src/merge/resources/hris/types/payroll_run.py @@ -3,14 +3,15 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .payroll_run_run_state import PayrollRunRunState from .payroll_run_run_type import PayrollRunRunType from .remote_data import RemoteData -class PayrollRun(pydantic_v1.BaseModel): +class PayrollRun(UniversalBaseModel): """ # The PayrollRun Object @@ -24,22 +25,22 @@ class PayrollRun(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - run_state: typing.Optional[PayrollRunRunState] = pydantic_v1.Field() + run_state: typing.Optional[PayrollRunRunState] = pydantic.Field() """ The state of the payroll run @@ -50,7 +51,7 @@ class PayrollRun(pydantic_v1.BaseModel): - `CLOSED` - CLOSED """ - run_type: typing.Optional[PayrollRunRunType] = pydantic_v1.Field() + run_type: typing.Optional[PayrollRunRunType] = pydantic.Field() """ The type of the payroll run @@ -61,22 +62,22 @@ class PayrollRun(pydantic_v1.BaseModel): - `SIGN_ON_BONUS` - SIGN_ON_BONUS """ - start_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + start_date: typing.Optional[dt.datetime] = pydantic.Field() """ The day and time the payroll run started. """ - end_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + end_date: typing.Optional[dt.datetime] = pydantic.Field() """ The day and time the payroll run ended. 
""" - check_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + check_date: typing.Optional[dt.datetime] = pydantic.Field() """ The day and time the payroll run was checked. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -84,20 +85,11 @@ class PayrollRun(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/remote_data.py b/src/merge/resources/hris/types/remote_data.py index 098f551b..d50bfca2 100644 --- a/src/merge/resources/hris/types/remote_data.py +++ b/src/merge/resources/hris/types/remote_data.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class RemoteData(pydantic_v1.BaseModel): + +class RemoteData(UniversalBaseModel): path: str data: typing.Optional[typing.Any] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/remote_endpoint_info.py b/src/merge/resources/hris/types/remote_endpoint_info.py index da6037bc..9f627cae 100644 --- a/src/merge/resources/hris/types/remote_endpoint_info.py +++ b/src/merge/resources/hris/types/remote_endpoint_info.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class RemoteEndpointInfo(pydantic_v1.BaseModel): + +class RemoteEndpointInfo(UniversalBaseModel): method: str url_path: str field_traversal_path: typing.List[typing.Any] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/remote_field_api.py b/src/merge/resources/hris/types/remote_field_api.py index c2a16698..1d1efb6a 100644 --- a/src/merge/resources/hris/types/remote_field_api.py +++ b/src/merge/resources/hris/types/remote_field_api.py @@ -1,39 +1,28 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .advanced_metadata import AdvancedMetadata from .remote_endpoint_info import RemoteEndpointInfo from .remote_field_api_coverage import RemoteFieldApiCoverage -class RemoteFieldApi(pydantic_v1.BaseModel): - schema_: typing.Dict[str, typing.Any] = pydantic_v1.Field(alias="schema") +class RemoteFieldApi(UniversalBaseModel): + schema_: typing.Dict[str, typing.Any] = pydantic.Field(alias="schema") remote_key_name: str remote_endpoint_info: RemoteEndpointInfo example_values: typing.List[typing.Any] advanced_metadata: typing.Optional[AdvancedMetadata] coverage: typing.Optional[RemoteFieldApiCoverage] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/remote_field_api_response.py b/src/merge/resources/hris/types/remote_field_api_response.py index 6c3fc84f..b3f253a2 100644 --- a/src/merge/resources/hris/types/remote_field_api_response.py +++ b/src/merge/resources/hris/types/remote_field_api_response.py @@ -1,47 +1,36 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .remote_field_api import RemoteFieldApi - - -class RemoteFieldApiResponse(pydantic_v1.BaseModel): - benefit: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Benefit") - employer_benefit: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="EmployerBenefit") - company: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Company") - employee_payroll_run: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="EmployeePayrollRun") - employee: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Employee") - employment: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Employment") - location: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Location") - payroll_run: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="PayrollRun") - team: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Team") - time_off: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="TimeOff") - time_off_balance: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="TimeOffBalance") - bank_info: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="BankInfo") - pay_group: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="PayGroup") - group: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Group") - dependent: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Dependent") - timesheet_entry: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="TimesheetEntry") +import pydantic - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .remote_field_api import RemoteFieldApi - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} +class RemoteFieldApiResponse(UniversalBaseModel): + benefit: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Benefit") + employer_benefit: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="EmployerBenefit") + company: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Company") + employee_payroll_run: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="EmployeePayrollRun") + employee: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Employee") + employment: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Employment") + location: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Location") + payroll_run: 
typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="PayrollRun") + team: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Team") + time_off: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="TimeOff") + time_off_balance: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="TimeOffBalance") + bank_info: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="BankInfo") + pay_group: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="PayGroup") + group: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Group") + dependent: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Dependent") + timesheet_entry: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="TimesheetEntry") + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/remote_key.py b/src/merge/resources/hris/types/remote_key.py index e0bec368..0ce7d620 100644 --- a/src/merge/resources/hris/types/remote_key.py +++ b/src/merge/resources/hris/types/remote_key.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class RemoteKey(pydantic_v1.BaseModel): + +class RemoteKey(UniversalBaseModel): """ # The RemoteKey Object @@ -23,20 +23,11 @@ class RemoteKey(pydantic_v1.BaseModel): name: str key: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/remote_response.py b/src/merge/resources/hris/types/remote_response.py index e7c13c23..8edcb9a8 100644 --- a/src/merge/resources/hris/types/remote_response.py +++ b/src/merge/resources/hris/types/remote_response.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_response_response_type import RemoteResponseResponseType -class RemoteResponse(pydantic_v1.BaseModel): +class RemoteResponse(UniversalBaseModel): """ # The RemoteResponse Object @@ -29,20 +29,11 @@ class RemoteResponse(pydantic_v1.BaseModel): response_type: typing.Optional[RemoteResponseResponseType] headers: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/sync_status.py b/src/merge/resources/hris/types/sync_status.py index c6b7cbc4..03668cbf 100644 --- a/src/merge/resources/hris/types/sync_status.py +++ b/src/merge/resources/hris/types/sync_status.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .selective_sync_configurations_usage_enum import SelectiveSyncConfigurationsUsageEnum from .sync_status_status_enum import SyncStatusStatusEnum -class SyncStatus(pydantic_v1.BaseModel): +class SyncStatus(UniversalBaseModel): """ # The SyncStatus Object @@ -30,20 +31,11 @@ class SyncStatus(pydantic_v1.BaseModel): is_initial_sync: bool selective_sync_configurations_usage: typing.Optional[SelectiveSyncConfigurationsUsageEnum] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra 
= pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/tax.py b/src/merge/resources/hris/types/tax.py index cb8946b0..68702d6e 100644 --- a/src/merge/resources/hris/types/tax.py +++ b/src/merge/resources/hris/types/tax.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData -class Tax(pydantic_v1.BaseModel): +class Tax(UniversalBaseModel): """ # The Tax Object @@ -22,38 +23,38 @@ class Tax(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ employee_payroll_run: typing.Optional[str] - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The tax's name. """ - amount: typing.Optional[float] = pydantic_v1.Field() + amount: typing.Optional[float] = pydantic.Field() """ The tax amount. """ - employer_tax: typing.Optional[bool] = pydantic_v1.Field() + employer_tax: typing.Optional[bool] = pydantic.Field() """ Whether or not the employer is responsible for paying the tax. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -61,20 +62,11 @@ class Tax(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/team.py b/src/merge/resources/hris/types/team.py index 6b41cf48..7d5d0e56 100644 --- a/src/merge/resources/hris/types/team.py +++ b/src/merge/resources/hris/types/team.py @@ -5,12 +5,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel, update_forward_refs from .remote_data import RemoteData -class Team(pydantic_v1.BaseModel): +class Team(UniversalBaseModel): """ # The Team Object @@ -24,32 +25,32 @@ class Team(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The team's name. """ - parent_team: typing.Optional[TeamParentTeam] = pydantic_v1.Field() + parent_team: typing.Optional[TeamParentTeam] = pydantic.Field() """ The team's parent team. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. 
""" @@ -57,25 +58,16 @@ class Team(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow from .team_parent_team import TeamParentTeam # noqa: E402 -Team.update_forward_refs() +update_forward_refs(Team) diff --git a/src/merge/resources/hris/types/time_off.py b/src/merge/resources/hris/types/time_off.py index 53dc58c7..7437ca64 100644 --- a/src/merge/resources/hris/types/time_off.py +++ b/src/merge/resources/hris/types/time_off.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData from .time_off_approver import TimeOffApprover from .time_off_employee import TimeOffEmployee @@ -13,7 +14,7 @@ from .time_off_units import TimeOffUnits -class TimeOff(pydantic_v1.BaseModel): +class TimeOff(UniversalBaseModel): """ # The TimeOff Object @@ -27,32 +28,32 @@ class TimeOff(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - employee: typing.Optional[TimeOffEmployee] = pydantic_v1.Field() + employee: typing.Optional[TimeOffEmployee] = pydantic.Field() """ The employee requesting time off. """ - approver: typing.Optional[TimeOffApprover] = pydantic_v1.Field() + approver: typing.Optional[TimeOffApprover] = pydantic.Field() """ The Merge ID of the employee with the ability to approve the time off request. """ - status: typing.Optional[TimeOffStatus] = pydantic_v1.Field() + status: typing.Optional[TimeOffStatus] = pydantic.Field() """ The status of this time off request. @@ -63,12 +64,12 @@ class TimeOff(pydantic_v1.BaseModel): - `DELETED` - DELETED """ - employee_note: typing.Optional[str] = pydantic_v1.Field() + employee_note: typing.Optional[str] = pydantic.Field() """ The employee note for this time off request. 
""" - units: typing.Optional[TimeOffUnits] = pydantic_v1.Field() + units: typing.Optional[TimeOffUnits] = pydantic.Field() """ The measurement that the third-party integration uses to count time requested. @@ -76,12 +77,12 @@ class TimeOff(pydantic_v1.BaseModel): - `DAYS` - DAYS """ - amount: typing.Optional[float] = pydantic_v1.Field() + amount: typing.Optional[float] = pydantic.Field() """ The time off quantity measured by the prescribed “units”. """ - request_type: typing.Optional[TimeOffRequestType] = pydantic_v1.Field() + request_type: typing.Optional[TimeOffRequestType] = pydantic.Field() """ The type of time off request. @@ -93,12 +94,12 @@ class TimeOff(pydantic_v1.BaseModel): - `BEREAVEMENT` - BEREAVEMENT """ - start_time: typing.Optional[dt.datetime] = pydantic_v1.Field() + start_time: typing.Optional[dt.datetime] = pydantic.Field() """ The day and time of the start of the time requested off. """ - end_time: typing.Optional[dt.datetime] = pydantic_v1.Field() + end_time: typing.Optional[dt.datetime] = pydantic.Field() """ The day and time of the end of the time requested off. """ @@ -107,20 +108,11 @@ class TimeOff(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/time_off_balance.py b/src/merge/resources/hris/types/time_off_balance.py index 2b5bf7e9..dce0555d 100644 --- a/src/merge/resources/hris/types/time_off_balance.py +++ b/src/merge/resources/hris/types/time_off_balance.py @@ -3,14 +3,15 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData from .time_off_balance_employee import TimeOffBalanceEmployee from .time_off_balance_policy_type import TimeOffBalancePolicyType -class TimeOffBalance(pydantic_v1.BaseModel): +class TimeOffBalance(UniversalBaseModel): """ # The TimeOffBalance Object @@ -24,37 +25,37 @@ class TimeOffBalance(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. 
""" - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - employee: typing.Optional[TimeOffBalanceEmployee] = pydantic_v1.Field() + employee: typing.Optional[TimeOffBalanceEmployee] = pydantic.Field() """ The employee the balance belongs to. """ - balance: typing.Optional[float] = pydantic_v1.Field() + balance: typing.Optional[float] = pydantic.Field() """ The current remaining PTO balance, always measured in terms of hours. """ - used: typing.Optional[float] = pydantic_v1.Field() + used: typing.Optional[float] = pydantic.Field() """ The amount of PTO used in terms of hours. """ - policy_type: typing.Optional[TimeOffBalancePolicyType] = pydantic_v1.Field() + policy_type: typing.Optional[TimeOffBalancePolicyType] = pydantic.Field() """ The policy type of this time off balance. @@ -66,7 +67,7 @@ class TimeOffBalance(pydantic_v1.BaseModel): - `BEREAVEMENT` - BEREAVEMENT """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -74,20 +75,11 @@ class TimeOffBalance(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/time_off_request.py b/src/merge/resources/hris/types/time_off_request.py index 983cc04a..fb5ec3d0 100644 --- a/src/merge/resources/hris/types/time_off_request.py +++ b/src/merge/resources/hris/types/time_off_request.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .time_off_request_approver import TimeOffRequestApprover from .time_off_request_employee import TimeOffRequestEmployee from .time_off_request_request_type import TimeOffRequestRequestType @@ -12,7 +13,7 @@ from .time_off_request_units import TimeOffRequestUnits -class TimeOffRequest(pydantic_v1.BaseModel): +class TimeOffRequest(UniversalBaseModel): """ # The TimeOff Object @@ -25,17 +26,17 @@ class TimeOffRequest(pydantic_v1.BaseModel): Fetch from the `LIST TimeOffs` endpoint and filter by `ID` to show all time off requests. 
""" - employee: typing.Optional[TimeOffRequestEmployee] = pydantic_v1.Field() + employee: typing.Optional[TimeOffRequestEmployee] = pydantic.Field() """ The employee requesting time off. """ - approver: typing.Optional[TimeOffRequestApprover] = pydantic_v1.Field() + approver: typing.Optional[TimeOffRequestApprover] = pydantic.Field() """ The Merge ID of the employee with the ability to approve the time off request. """ - status: typing.Optional[TimeOffRequestStatus] = pydantic_v1.Field() + status: typing.Optional[TimeOffRequestStatus] = pydantic.Field() """ The status of this time off request. @@ -46,12 +47,12 @@ class TimeOffRequest(pydantic_v1.BaseModel): - `DELETED` - DELETED """ - employee_note: typing.Optional[str] = pydantic_v1.Field() + employee_note: typing.Optional[str] = pydantic.Field() """ The employee note for this time off request. """ - units: typing.Optional[TimeOffRequestUnits] = pydantic_v1.Field() + units: typing.Optional[TimeOffRequestUnits] = pydantic.Field() """ The measurement that the third-party integration uses to count time requested. @@ -59,12 +60,12 @@ class TimeOffRequest(pydantic_v1.BaseModel): - `DAYS` - DAYS """ - amount: typing.Optional[float] = pydantic_v1.Field() + amount: typing.Optional[float] = pydantic.Field() """ The time off quantity measured by the prescribed “units”. """ - request_type: typing.Optional[TimeOffRequestRequestType] = pydantic_v1.Field() + request_type: typing.Optional[TimeOffRequestRequestType] = pydantic.Field() """ The type of time off request. @@ -76,12 +77,12 @@ class TimeOffRequest(pydantic_v1.BaseModel): - `BEREAVEMENT` - BEREAVEMENT """ - start_time: typing.Optional[dt.datetime] = pydantic_v1.Field() + start_time: typing.Optional[dt.datetime] = pydantic.Field() """ The day and time of the start of the time requested off. """ - end_time: typing.Optional[dt.datetime] = pydantic_v1.Field() + end_time: typing.Optional[dt.datetime] = pydantic.Field() """ The day and time of the end of the time requested off. """ @@ -89,20 +90,11 @@ class TimeOffRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/time_off_response.py b/src/merge/resources/hris/types/time_off_response.py index 92837ef1..3ab12e52 100644 --- a/src/merge/resources/hris/types/time_off_response.py +++ b/src/merge/resources/hris/types/time_off_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .time_off import TimeOff from .warning_validation_problem import WarningValidationProblem -class TimeOffResponse(pydantic_v1.BaseModel): +class TimeOffResponse(UniversalBaseModel): model: TimeOff warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/timesheet_entry.py b/src/merge/resources/hris/types/timesheet_entry.py index 57b9e21f..32ba3c4a 100644 --- a/src/merge/resources/hris/types/timesheet_entry.py +++ b/src/merge/resources/hris/types/timesheet_entry.py @@ -3,11 +3,12 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class TimesheetEntry(pydantic_v1.BaseModel): + +class TimesheetEntry(UniversalBaseModel): """ # The Timesheet Entry Object @@ -21,42 +22,42 @@ class TimesheetEntry(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - employee: typing.Optional[str] = pydantic_v1.Field() + employee: typing.Optional[str] = pydantic.Field() """ The employee the timesheet entry is for. """ - hours_worked: typing.Optional[float] = pydantic_v1.Field() + hours_worked: typing.Optional[float] = pydantic.Field() """ The number of hours logged by the employee. """ - start_time: typing.Optional[dt.datetime] = pydantic_v1.Field() + start_time: typing.Optional[dt.datetime] = pydantic.Field() """ The time at which the employee started work. 
""" - end_time: typing.Optional[dt.datetime] = pydantic_v1.Field() + end_time: typing.Optional[dt.datetime] = pydantic.Field() """ The time at which the employee ended work. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -64,20 +65,11 @@ class TimesheetEntry(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[typing.Optional[typing.Dict[str, typing.Any]]]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/timesheet_entry_request.py b/src/merge/resources/hris/types/timesheet_entry_request.py index 83d0593e..cfa047a7 100644 --- a/src/merge/resources/hris/types/timesheet_entry_request.py +++ b/src/merge/resources/hris/types/timesheet_entry_request.py @@ -3,11 +3,12 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class TimesheetEntryRequest(pydantic_v1.BaseModel): + +class TimesheetEntryRequest(UniversalBaseModel): """ # The Timesheet Entry Object @@ -20,22 +21,22 @@ class TimesheetEntryRequest(pydantic_v1.BaseModel): GET and POST Timesheet Entries """ - employee: typing.Optional[str] = pydantic_v1.Field() + employee: typing.Optional[str] = pydantic.Field() """ The employee the timesheet entry is for. """ - hours_worked: typing.Optional[float] = pydantic_v1.Field() + hours_worked: typing.Optional[float] = pydantic.Field() """ The number of hours logged by the employee. """ - start_time: typing.Optional[dt.datetime] = pydantic_v1.Field() + start_time: typing.Optional[dt.datetime] = pydantic.Field() """ The time at which the employee started work. """ - end_time: typing.Optional[dt.datetime] = pydantic_v1.Field() + end_time: typing.Optional[dt.datetime] = pydantic.Field() """ The time at which the employee ended work. 
""" @@ -43,20 +44,11 @@ class TimesheetEntryRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/timesheet_entry_response.py b/src/merge/resources/hris/types/timesheet_entry_response.py index d27f29fd..f0d04337 100644 --- a/src/merge/resources/hris/types/timesheet_entry_response.py +++ b/src/merge/resources/hris/types/timesheet_entry_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .timesheet_entry import TimesheetEntry from .warning_validation_problem import WarningValidationProblem -class TimesheetEntryResponse(pydantic_v1.BaseModel): +class TimesheetEntryResponse(UniversalBaseModel): model: TimesheetEntry warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/validation_problem_source.py b/src/merge/resources/hris/types/validation_problem_source.py index fde15b40..c65d82ef 100644 --- a/src/merge/resources/hris/types/validation_problem_source.py +++ 
b/src/merge/resources/hris/types/validation_problem_source.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ValidationProblemSource(pydantic_v1.BaseModel): - pointer: str - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class ValidationProblemSource(UniversalBaseModel): + pointer: str - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/warning_validation_problem.py b/src/merge/resources/hris/types/warning_validation_problem.py index 6baf9600..348d668a 100644 --- a/src/merge/resources/hris/types/warning_validation_problem.py +++ b/src/merge/resources/hris/types/warning_validation_problem.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
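The team.py hunk above also swaps Team.update_forward_refs() for a version-agnostic update_forward_refs(Team) imported from the same core module, needed because TeamParentTeam is imported at the bottom of the module to break a circular import. The shipped helper is not shown in this diff; a sketch of what it is assumed to do:

import typing

import pydantic

IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")


def update_forward_refs(model: typing.Type[pydantic.BaseModel], **localns: typing.Any) -> None:
    # Resolve string annotations such as parent_team: Optional["TeamParentTeam"].
    if IS_PYDANTIC_V2:
        model.model_rebuild(raise_errors=False)  # Pydantic v2 spelling
    else:
        model.update_forward_refs(**localns)  # Pydantic v1 spelling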
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .validation_problem_source import ValidationProblemSource -class WarningValidationProblem(pydantic_v1.BaseModel): +class WarningValidationProblem(UniversalBaseModel): source: typing.Optional[ValidationProblemSource] title: str detail: str problem_type: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/hris/types/webhook_receiver.py b/src/merge/resources/hris/types/webhook_receiver.py index 0544f256..bb10af95 100644 --- a/src/merge/resources/hris/types/webhook_receiver.py +++ b/src/merge/resources/hris/types/webhook_receiver.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
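The response wrappers converted above (TimeOffResponse, TimesheetEntryResponse) keep their shape through the migration: model, warnings, errors, logs. Caller-side handling is therefore unchanged; a small sketch, assuming TimeOffResponse is re-exported from merge.resources.hris the same way the ticketing types are, and assuming ErrorValidationProblem mirrors the WarningValidationProblem fields shown above:

from merge.resources.hris import TimeOffResponse


def report_problems(response: TimeOffResponse) -> None:
    # Both lists carry title / detail / problem_type, plus an optional source pointer.
    for warning in response.warnings:
        print(f"warning [{warning.problem_type}]: {warning.title}: {warning.detail}")
    for error in response.errors:
        print(f"error [{error.problem_type}]: {error.title}: {error.detail}")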
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class WebhookReceiver(pydantic_v1.BaseModel): + +class WebhookReceiver(UniversalBaseModel): event: str is_active: bool key: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/resources/account_details/client.py b/src/merge/resources/ticketing/resources/account_details/client.py index 3e1b1dd6..1c4c17dc 100644 --- a/src/merge/resources/ticketing/resources/account_details/client.py +++ b/src/merge/resources/ticketing/resources/account_details/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.account_details import AccountDetails @@ -41,9 +41,9 @@ def retrieve(self, *, request_options: typing.Optional[RequestOptions] = None) - _response = self._client_wrapper.httpx_client.request( "ticketing/v1/account-details", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountDetails, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountDetails, parse_obj_as(type_=AccountDetails, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -70,20 +70,28 @@ async def retrieve(self, *, request_options: typing.Optional[RequestOptions] = N Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.account_details.retrieve() + + + async def main() -> None: + await client.ticketing.account_details.retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/account-details", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountDetails, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountDetails, parse_obj_as(type_=AccountDetails, object_=_response.json())) # 
type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/account_token/client.py b/src/merge/resources/ticketing/resources/account_token/client.py index 313a070e..56553805 100644 --- a/src/merge/resources/ticketing/resources/account_token/client.py +++ b/src/merge/resources/ticketing/resources/account_token/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.account_token import AccountToken @@ -48,9 +48,9 @@ def retrieve(self, public_token: str, *, request_options: typing.Optional[Reques method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountToken, parse_obj_as(type_=AccountToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -81,24 +81,32 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.account_token.retrieve( - public_token="public_token", - ) + + + async def main() -> None: + await client.ticketing.account_token.retrieve( + public_token="public_token", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ticketing/v1/account-token/{jsonable_encoder(public_token)}", method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AccountToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AccountToken, parse_obj_as(type_=AccountToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/accounts/client.py b/src/merge/resources/ticketing/resources/accounts/client.py index 4c6a9ec8..584422a7 100644 --- a/src/merge/resources/ticketing/resources/accounts/client.py +++ b/src/merge/resources/ticketing/resources/accounts/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.account import Account from ...types.paginated_account_list import PaginatedAccountList @@ -98,9 +98,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountList, parse_obj_as(type_=PaginatedAccountList, object_=_response.json())) # type: ignore 
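Throughout the client hunks, pydantic_v1.parse_obj_as(Model, data) becomes typing.cast(Model, parse_obj_as(type_=Model, object_=data)), with parse_obj_as imported from the core pydantic_utilities module and the success branch moved inside the try so JSON decoding errors surface as ApiError. The wrapper itself is not in this excerpt; a minimal sketch of what it is assumed to do on both pydantic majors:

import typing

import pydantic

IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")

T = typing.TypeVar("T")


def parse_obj_as(type_: typing.Type[T], object_: typing.Any) -> T:
    if IS_PYDANTIC_V2:
        # TypeAdapter handles BaseModel subclasses and plain typing constructs alike.
        return pydantic.TypeAdapter(type_).validate_python(object_)
    return pydantic.parse_obj_as(type_, object_)

The surrounding typing.cast in the generated clients only narrows the static type; it performs no runtime conversion.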
_response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -149,9 +149,9 @@ def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Account, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Account, parse_obj_as(type_=Account, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -218,13 +218,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.accounts.list() + + + async def main() -> None: + await client.ticketing.accounts.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/accounts", @@ -242,9 +250,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountList, parse_obj_as(type_=PaginatedAccountList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -277,15 +285,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.accounts.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ticketing.accounts.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ticketing/v1/accounts/{jsonable_encoder(id)}", @@ -293,9 +309,9 @@ async def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Account, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Account, parse_obj_as(type_=Account, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/async_passthrough/client.py b/src/merge/resources/ticketing/resources/async_passthrough/client.py index c864dcd9..5df946d9 100644 --- a/src/merge/resources/ticketing/resources/async_passthrough/client.py +++ b/src/merge/resources/ticketing/resources/async_passthrough/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.async_passthrough_reciept import AsyncPassthroughReciept from ...types.data_passthrough_request import DataPassthroughRequest @@ -57,9 +57,9 @@ def create( _response = self._client_wrapper.httpx_client.request( "ticketing/v1/async-passthrough", method="POST", json=request, 
request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AsyncPassthroughReciept, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AsyncPassthroughReciept, parse_obj_as(type_=AsyncPassthroughReciept, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -100,9 +100,9 @@ def retrieve( method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -133,6 +133,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ticketing import DataPassthroughRequest, MethodEnum @@ -140,19 +142,25 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.async_passthrough.create( - request=DataPassthroughRequest( - method=MethodEnum.GET, - path="/scooters", - ), - ) + + + async def main() -> None: + await client.ticketing.async_passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/async-passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AsyncPassthroughReciept, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AsyncPassthroughReciept, parse_obj_as(type_=AsyncPassthroughReciept, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -178,24 +186,32 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.async_passthrough.retrieve( - async_passthrough_receipt_id="async_passthrough_receipt_id", - ) + + + async def main() -> None: + await client.ticketing.async_passthrough.retrieve( + async_passthrough_receipt_id="async_passthrough_receipt_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ticketing/v1/async-passthrough/{jsonable_encoder(async_passthrough_receipt_id)}", method="GET", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/attachments/client.py b/src/merge/resources/ticketing/resources/attachments/client.py index 6a4ce621..aa6a6573 100644 --- 
a/src/merge/resources/ticketing/resources/attachments/client.py +++ b/src/merge/resources/ticketing/resources/attachments/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.attachment import Attachment from ...types.attachment_request import AttachmentRequest @@ -121,9 +121,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAttachmentList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAttachmentList, parse_obj_as(type_=PaginatedAttachmentList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -179,9 +179,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TicketingAttachmentResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TicketingAttachmentResponse, parse_obj_as(type_=TicketingAttachmentResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -234,9 +234,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Attachment, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Attachment, parse_obj_as(type_=Attachment, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -286,12 +286,12 @@ def download_retrieve( params={"mime_type": mime_type}, request_options=request_options, ) as _response: - if 200 <= _response.status_code < 300: - for _chunk in _response.iter_bytes(): - yield _chunk - return - _response.read() try: + if 200 <= _response.status_code < 300: + for _chunk in _response.iter_bytes(): + yield _chunk + return + _response.read() _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -324,9 +324,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "ticketing/v1/attachments/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -405,13 +405,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await 
client.ticketing.attachments.list() + + + async def main() -> None: + await client.ticketing.attachments.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/attachments", @@ -434,9 +442,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAttachmentList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAttachmentList, parse_obj_as(type_=PaginatedAttachmentList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -473,6 +481,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ticketing import AttachmentRequest @@ -480,9 +490,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.attachments.create( - model=AttachmentRequest(), - ) + + + async def main() -> None: + await client.ticketing.attachments.create( + model=AttachmentRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/attachments", @@ -492,9 +508,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TicketingAttachmentResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TicketingAttachmentResponse, parse_obj_as(type_=TicketingAttachmentResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -531,15 +547,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.attachments.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ticketing.attachments.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ticketing/v1/attachments/{jsonable_encoder(id)}", @@ -547,9 +571,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Attachment, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Attachment, parse_obj_as(type_=Attachment, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -582,16 +606,24 @@ async def download_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.attachments.download_retrieve( - id="string", - mime_type="string", - ) + + + async def main() -> None: + await client.ticketing.attachments.download_retrieve( + id="string", + mime_type="string", + ) + + + asyncio.run(main()) """ async with self._client_wrapper.httpx_client.stream( f"ticketing/v1/attachments/{jsonable_encoder(id)}/download", @@ -599,12 +631,12 @@ async def download_retrieve( params={"mime_type": 
mime_type}, request_options=request_options, ) as _response: - if 200 <= _response.status_code < 300: - async for _chunk in _response.aiter_bytes(): - yield _chunk - return - await _response.aread() try: + if 200 <= _response.status_code < 300: + async for _chunk in _response.aiter_bytes(): + yield _chunk + return + await _response.aread() _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -626,20 +658,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.attachments.meta_post_retrieve() + + + async def main() -> None: + await client.ticketing.attachments.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/attachments/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/audit_trail/client.py b/src/merge/resources/ticketing/resources/audit_trail/client.py index fc8e5e61..44a2d28f 100644 --- a/src/merge/resources/ticketing/resources/audit_trail/client.py +++ b/src/merge/resources/ticketing/resources/audit_trail/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_audit_log_event_list import PaginatedAuditLogEventList @@ -79,9 +79,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAuditLogEventList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAuditLogEventList, parse_obj_as(type_=PaginatedAuditLogEventList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -136,13 +136,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.audit_trail.list() + + + async def main() -> None: + await client.ticketing.audit_trail.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/audit-trail", @@ -157,9 +165,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAuditLogEventList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAuditLogEventList, parse_obj_as(type_=PaginatedAuditLogEventList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise 
ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/available_actions/client.py b/src/merge/resources/ticketing/resources/available_actions/client.py index 5d01997a..77631398 100644 --- a/src/merge/resources/ticketing/resources/available_actions/client.py +++ b/src/merge/resources/ticketing/resources/available_actions/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.available_actions import AvailableActions @@ -41,9 +41,9 @@ def retrieve(self, *, request_options: typing.Optional[RequestOptions] = None) - _response = self._client_wrapper.httpx_client.request( "ticketing/v1/available-actions", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AvailableActions, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AvailableActions, parse_obj_as(type_=AvailableActions, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -70,20 +70,28 @@ async def retrieve(self, *, request_options: typing.Optional[RequestOptions] = N Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.available_actions.retrieve() + + + async def main() -> None: + await client.ticketing.available_actions.retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/available-actions", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AvailableActions, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(AvailableActions, parse_obj_as(type_=AvailableActions, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/collections/client.py b/src/merge/resources/ticketing/resources/collections/client.py index aa19e9db..22ca1ddb 100644 --- a/src/merge/resources/ticketing/resources/collections/client.py +++ b/src/merge/resources/ticketing/resources/collections/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.collection import Collection from ...types.paginated_collection_list import PaginatedCollectionList @@ -125,9 +125,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedCollectionList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedCollectionList, parse_obj_as(type_=PaginatedCollectionList, object_=_response.json())) # type: ignore 
_response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -193,9 +193,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Collection, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Collection, parse_obj_as(type_=Collection, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -266,9 +266,9 @@ def users_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedUserList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedUserList, parse_obj_as(type_=PaginatedUserList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -355,13 +355,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.collections.list() + + + async def main() -> None: + await client.ticketing.collections.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/collections", @@ -384,9 +392,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedCollectionList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedCollectionList, parse_obj_as(type_=PaginatedCollectionList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -431,15 +439,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.collections.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ticketing.collections.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ticketing/v1/collections/{jsonable_encoder(id)}", @@ -452,9 +468,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Collection, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Collection, parse_obj_as(type_=Collection, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -503,15 +519,23 @@ async def users_list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.collections.users_list( - parent_id="parent_id", - ) + + + async def main() -> None: + await client.ticketing.collections.users_list( + parent_id="parent_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( 
f"ticketing/v1/collections/{jsonable_encoder(parent_id)}/users", @@ -525,9 +549,9 @@ async def users_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedUserList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedUserList, parse_obj_as(type_=PaginatedUserList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/comments/client.py b/src/merge/resources/ticketing/resources/comments/client.py index 76182e8f..948fa054 100644 --- a/src/merge/resources/ticketing/resources/comments/client.py +++ b/src/merge/resources/ticketing/resources/comments/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.comment import Comment from ...types.comment_request import CommentRequest @@ -123,9 +123,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedCommentList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedCommentList, parse_obj_as(type_=PaginatedCommentList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -181,9 +181,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommentResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommentResponse, parse_obj_as(type_=CommentResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -236,9 +236,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Comment, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Comment, parse_obj_as(type_=Comment, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -271,9 +271,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "ticketing/v1/comments/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -352,13 +352,21 @@ async def list( Examples -------- + import 
asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.comments.list() + + + async def main() -> None: + await client.ticketing.comments.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/comments", @@ -381,9 +389,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedCommentList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedCommentList, parse_obj_as(type_=PaginatedCommentList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -420,6 +428,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ticketing import CommentRequest @@ -427,9 +437,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.comments.create( - model=CommentRequest(), - ) + + + async def main() -> None: + await client.ticketing.comments.create( + model=CommentRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/comments", @@ -439,9 +455,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommentResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommentResponse, parse_obj_as(type_=CommentResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -478,15 +494,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.comments.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ticketing.comments.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ticketing/v1/comments/{jsonable_encoder(id)}", @@ -494,9 +518,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Comment, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Comment, parse_obj_as(type_=Comment, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -518,20 +542,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.comments.meta_post_retrieve() + + + async def main() -> None: + await client.ticketing.comments.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/comments/meta/post", method="GET", request_options=request_options ) - if 200 <= 
_response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/contacts/client.py b/src/merge/resources/ticketing/resources/contacts/client.py index dbc23ba0..bd12128b 100644 --- a/src/merge/resources/ticketing/resources/contacts/client.py +++ b/src/merge/resources/ticketing/resources/contacts/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.contact import Contact from ...types.contact_request import ContactRequest @@ -109,9 +109,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedContactList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedContactList, parse_obj_as(type_=PaginatedContactList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -167,9 +167,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TicketingContactResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TicketingContactResponse, parse_obj_as(type_=TicketingContactResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -222,9 +222,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Contact, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Contact, parse_obj_as(type_=Contact, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -257,9 +257,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "ticketing/v1/contacts/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -330,13 +330,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await 
client.ticketing.contacts.list() + + + async def main() -> None: + await client.ticketing.contacts.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/contacts", @@ -355,9 +363,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedContactList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedContactList, parse_obj_as(type_=PaginatedContactList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -394,6 +402,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ticketing import ContactRequest @@ -401,9 +411,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.contacts.create( - model=ContactRequest(), - ) + + + async def main() -> None: + await client.ticketing.contacts.create( + model=ContactRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/contacts", @@ -413,9 +429,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TicketingContactResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TicketingContactResponse, parse_obj_as(type_=TicketingContactResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -452,15 +468,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.contacts.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ticketing.contacts.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ticketing/v1/contacts/{jsonable_encoder(id)}", @@ -468,9 +492,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Contact, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Contact, parse_obj_as(type_=Contact, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -492,20 +516,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.contacts.meta_post_retrieve() + + + async def main() -> None: + await client.ticketing.contacts.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/contacts/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= 
_response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/delete_account/client.py b/src/merge/resources/ticketing/resources/delete_account/client.py index 91a8cdaa..8ecaaef4 100644 --- a/src/merge/resources/ticketing/resources/delete_account/client.py +++ b/src/merge/resources/ticketing/resources/delete_account/client.py @@ -38,9 +38,9 @@ def delete(self, *, request_options: typing.Optional[RequestOptions] = None) -> _response = self._client_wrapper.httpx_client.request( "ticketing/v1/delete-account", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -66,20 +66,28 @@ async def delete(self, *, request_options: typing.Optional[RequestOptions] = Non Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.delete_account.delete() + + + async def main() -> None: + await client.ticketing.delete_account.delete() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/delete-account", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return try: + if 200 <= _response.status_code < 300: + return _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/field_mapping/client.py b/src/merge/resources/ticketing/resources/field_mapping/client.py index f8b76b08..0e066c70 100644 --- a/src/merge/resources/ticketing/resources/field_mapping/client.py +++ b/src/merge/resources/ticketing/resources/field_mapping/client.py @@ -6,7 +6,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.external_target_field_api_response import ExternalTargetFieldApiResponse from ...types.field_mapping_api_instance_response import FieldMappingApiInstanceResponse @@ -50,9 +50,9 @@ def field_mappings_retrieve( _response = self._client_wrapper.httpx_client.request( "ticketing/v1/field-mappings", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingApiInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingApiInstanceResponse, parse_obj_as(type_=FieldMappingApiInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -131,9 +131,9 @@ def field_mappings_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # 
type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -174,9 +174,9 @@ def field_mappings_destroy( method="DELETE", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -238,9 +238,9 @@ def field_mappings_partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -288,9 +288,9 @@ def remote_fields_retrieve( params={"common_models": common_models, "include_example_values": include_example_values}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteFieldApiResponse, parse_obj_as(type_=RemoteFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -325,9 +325,9 @@ def target_fields_retrieve( _response = self._client_wrapper.httpx_client.request( "ticketing/v1/target-fields", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExternalTargetFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ExternalTargetFieldApiResponse, parse_obj_as(type_=ExternalTargetFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -356,20 +356,28 @@ async def field_mappings_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.field_mapping.field_mappings_retrieve() + + + async def main() -> None: + await client.ticketing.field_mapping.field_mappings_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/field-mappings", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingApiInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingApiInstanceResponse, parse_obj_as(type_=FieldMappingApiInstanceResponse, 
object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -419,20 +427,28 @@ async def field_mappings_create( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.field_mapping.field_mappings_create( - target_field_name="example_target_field_name", - target_field_description="this is a example description of the target field", - remote_field_traversal_path=["example_remote_field"], - remote_method="GET", - remote_url_path="/example-url-path", - common_model_name="ExampleCommonModel", - ) + + + async def main() -> None: + await client.ticketing.field_mapping.field_mappings_create( + target_field_name="example_target_field_name", + target_field_description="this is a example description of the target field", + remote_field_traversal_path=["example_remote_field"], + remote_method="GET", + remote_url_path="/example-url-path", + common_model_name="ExampleCommonModel", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/field-mappings", @@ -448,9 +464,9 @@ async def field_mappings_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -476,24 +492,32 @@ async def field_mappings_destroy( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.field_mapping.field_mappings_destroy( - field_mapping_id="field_mapping_id", - ) + + + async def main() -> None: + await client.ticketing.field_mapping.field_mappings_destroy( + field_mapping_id="field_mapping_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ticketing/v1/field-mappings/{jsonable_encoder(field_mapping_id)}", method="DELETE", request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -534,15 +558,23 @@ async def field_mappings_partial_update( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.field_mapping.field_mappings_partial_update( - field_mapping_id="field_mapping_id", - ) + + + async def main() -> None: + await client.ticketing.field_mapping.field_mappings_partial_update( + field_mapping_id="field_mapping_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( 
f"ticketing/v1/field-mappings/{jsonable_encoder(field_mapping_id)}", @@ -555,9 +587,9 @@ async def field_mappings_partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(FieldMappingInstanceResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(FieldMappingInstanceResponse, parse_obj_as(type_=FieldMappingInstanceResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -591,13 +623,21 @@ async def remote_fields_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.field_mapping.remote_fields_retrieve() + + + async def main() -> None: + await client.ticketing.field_mapping.remote_fields_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/remote-fields", @@ -605,9 +645,9 @@ async def remote_fields_retrieve( params={"common_models": common_models, "include_example_values": include_example_values}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteFieldApiResponse, parse_obj_as(type_=RemoteFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -631,20 +671,28 @@ async def target_fields_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.field_mapping.target_fields_retrieve() + + + async def main() -> None: + await client.ticketing.field_mapping.target_fields_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/target-fields", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(ExternalTargetFieldApiResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(ExternalTargetFieldApiResponse, parse_obj_as(type_=ExternalTargetFieldApiResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/force_resync/client.py b/src/merge/resources/ticketing/resources/force_resync/client.py index ebecec49..314e86fc 100644 --- a/src/merge/resources/ticketing/resources/force_resync/client.py +++ b/src/merge/resources/ticketing/resources/force_resync/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.sync_status import SyncStatus @@ -43,9 +43,9 @@ def sync_status_resync_create( _response = self._client_wrapper.httpx_client.request( "ticketing/v1/sync-status/resync", method="POST", 
request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[SyncStatus], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[SyncStatus], parse_obj_as(type_=typing.List[SyncStatus], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -74,20 +74,28 @@ async def sync_status_resync_create( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.force_resync.sync_status_resync_create() + + + async def main() -> None: + await client.ticketing.force_resync.sync_status_resync_create() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/sync-status/resync", method="POST", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[SyncStatus], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[SyncStatus], parse_obj_as(type_=typing.List[SyncStatus], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/generate_key/client.py b/src/merge/resources/ticketing/resources/generate_key/client.py index da64e687..a2197c41 100644 --- a/src/merge/resources/ticketing/resources/generate_key/client.py +++ b/src/merge/resources/ticketing/resources/generate_key/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.remote_key import RemoteKey @@ -49,9 +49,9 @@ def create(self, *, name: str, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "ticketing/v1/generate-key", method="POST", json={"name": name}, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -81,22 +81,30 @@ async def create(self, *, name: str, request_options: typing.Optional[RequestOpt Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.generate_key.create( - name="Remote Deployment Key 1", - ) + + + async def main() -> None: + await client.ticketing.generate_key.create( + name="Remote Deployment Key 1", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/generate-key", method="POST", json={"name": name}, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore 
try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/issues/client.py b/src/merge/resources/ticketing/resources/issues/client.py index e80f54c3..c23abb3d 100644 --- a/src/merge/resources/ticketing/resources/issues/client.py +++ b/src/merge/resources/ticketing/resources/issues/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.issue import Issue from ...types.paginated_issue_list import PaginatedIssueList @@ -127,9 +127,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedIssueList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedIssueList, parse_obj_as(type_=PaginatedIssueList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -166,9 +166,9 @@ def retrieve(self, id: str, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( f"ticketing/v1/issues/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Issue, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Issue, parse_obj_as(type_=Issue, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -251,13 +251,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.issues.list() + + + async def main() -> None: + await client.ticketing.issues.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/issues", @@ -287,9 +295,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedIssueList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedIssueList, parse_obj_as(type_=PaginatedIssueList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -313,22 +321,30 @@ async def retrieve(self, id: str, *, request_options: typing.Optional[RequestOpt Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.issues.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ticketing.issues.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await 
self._client_wrapper.httpx_client.request( f"ticketing/v1/issues/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Issue, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Issue, parse_obj_as(type_=Issue, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/link_token/client.py b/src/merge/resources/ticketing/resources/link_token/client.py index 036ab22e..b0eadde9 100644 --- a/src/merge/resources/ticketing/resources/link_token/client.py +++ b/src/merge/resources/ticketing/resources/link_token/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.categories_enum import CategoriesEnum from ...types.common_model_scopes_body_request import CommonModelScopesBodyRequest @@ -119,9 +119,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LinkToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(LinkToken, parse_obj_as(type_=LinkToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -198,6 +198,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ticketing import CategoriesEnum @@ -205,12 +207,18 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.link_token.create( - end_user_email_address="example@gmail.com", - end_user_organization_name="Test Organization", - end_user_origin_id="12345", - categories=[CategoriesEnum.HRIS, CategoriesEnum.ATS], - ) + + + async def main() -> None: + await client.ticketing.link_token.create( + end_user_email_address="example@gmail.com", + end_user_organization_name="Test Organization", + end_user_origin_id="12345", + categories=[CategoriesEnum.HRIS, CategoriesEnum.ATS], + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/link-token", @@ -231,9 +239,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(LinkToken, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(LinkToken, parse_obj_as(type_=LinkToken, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/linked_accounts/client.py b/src/merge/resources/ticketing/resources/linked_accounts/client.py index 50bbac1a..68153596 100644 --- a/src/merge/resources/ticketing/resources/linked_accounts/client.py +++ b/src/merge/resources/ticketing/resources/linked_accounts/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from 
.....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_account_details_and_actions_list import PaginatedAccountDetailsAndActionsList from .types.linked_accounts_list_request_category import LinkedAccountsListRequestCategory @@ -122,9 +122,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountDetailsAndActionsList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountDetailsAndActionsList, parse_obj_as(type_=PaginatedAccountDetailsAndActionsList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -214,13 +214,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.linked_accounts.list() + + + async def main() -> None: + await client.ticketing.linked_accounts.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/linked-accounts", @@ -242,9 +250,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedAccountDetailsAndActionsList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedAccountDetailsAndActionsList, parse_obj_as(type_=PaginatedAccountDetailsAndActionsList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/passthrough/client.py b/src/merge/resources/ticketing/resources/passthrough/client.py index 28653534..a09d47fe 100644 --- a/src/merge/resources/ticketing/resources/passthrough/client.py +++ b/src/merge/resources/ticketing/resources/passthrough/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.data_passthrough_request import DataPassthroughRequest from ...types.remote_response import RemoteResponse @@ -55,9 +55,9 @@ def create( _response = self._client_wrapper.httpx_client.request( "ticketing/v1/passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -88,6 +88,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ticketing import DataPassthroughRequest, MethodEnum @@ -95,19 +97,25 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.passthrough.create( - 
request=DataPassthroughRequest( - method=MethodEnum.GET, - path="/scooters", - ), - ) + + + async def main() -> None: + await client.ticketing.passthrough.create( + request=DataPassthroughRequest( + method=MethodEnum.GET, + path="/scooters", + ), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/passthrough", method="POST", json=request, request_options=request_options, omit=OMIT ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteResponse, parse_obj_as(type_=RemoteResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/projects/client.py b/src/merge/resources/ticketing/resources/projects/client.py index 28ee1c1c..06ed49aa 100644 --- a/src/merge/resources/ticketing/resources/projects/client.py +++ b/src/merge/resources/ticketing/resources/projects/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_project_list import PaginatedProjectList from ...types.paginated_user_list import PaginatedUserList @@ -100,9 +100,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedProjectList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedProjectList, parse_obj_as(type_=PaginatedProjectList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -151,9 +151,9 @@ def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Project, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Project, parse_obj_as(type_=Project, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -224,9 +224,9 @@ def users_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedUserList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedUserList, parse_obj_as(type_=PaginatedUserList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -293,13 +293,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.projects.list() + + + async def main() -> None: + await client.ticketing.projects.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( 
"ticketing/v1/projects", @@ -317,9 +325,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedProjectList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedProjectList, parse_obj_as(type_=PaginatedProjectList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -352,15 +360,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.projects.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ticketing.projects.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ticketing/v1/projects/{jsonable_encoder(id)}", @@ -368,9 +384,9 @@ async def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Project, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Project, parse_obj_as(type_=Project, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -419,15 +435,23 @@ async def users_list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.projects.users_list( - parent_id="parent_id", - ) + + + async def main() -> None: + await client.ticketing.projects.users_list( + parent_id="parent_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ticketing/v1/projects/{jsonable_encoder(parent_id)}/users", @@ -441,9 +465,9 @@ async def users_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedUserList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedUserList, parse_obj_as(type_=PaginatedUserList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/regenerate_key/client.py b/src/merge/resources/ticketing/resources/regenerate_key/client.py index e429d9d2..f1dc910f 100644 --- a/src/merge/resources/ticketing/resources/regenerate_key/client.py +++ b/src/merge/resources/ticketing/resources/regenerate_key/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.remote_key import RemoteKey @@ -53,9 +53,9 @@ def create(self, *, name: str, request_options: typing.Optional[RequestOptions] request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return 
typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -85,15 +85,23 @@ async def create(self, *, name: str, request_options: typing.Optional[RequestOpt Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.regenerate_key.create( - name="Remote Deployment Key 1", - ) + + + async def main() -> None: + await client.ticketing.regenerate_key.create( + name="Remote Deployment Key 1", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/regenerate-key", @@ -102,9 +110,9 @@ async def create(self, *, name: str, request_options: typing.Optional[RequestOpt request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(RemoteKey, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(RemoteKey, parse_obj_as(type_=RemoteKey, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/roles/client.py b/src/merge/resources/ticketing/resources/roles/client.py index 2462969a..528b24d7 100644 --- a/src/merge/resources/ticketing/resources/roles/client.py +++ b/src/merge/resources/ticketing/resources/roles/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_role_list import PaginatedRoleList from ...types.role import Role @@ -98,9 +98,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRoleList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRoleList, parse_obj_as(type_=PaginatedRoleList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -149,9 +149,9 @@ def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Role, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Role, parse_obj_as(type_=Role, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -218,13 +218,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.roles.list() + + + async def main() -> None: + await client.ticketing.roles.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/roles", @@ -242,9 +250,9 @@ async def list( }, 
request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRoleList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRoleList, parse_obj_as(type_=PaginatedRoleList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -277,15 +285,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.roles.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ticketing.roles.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ticketing/v1/roles/{jsonable_encoder(id)}", @@ -293,9 +309,9 @@ async def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Role, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Role, parse_obj_as(type_=Role, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/scopes/client.py b/src/merge/resources/ticketing/resources/scopes/client.py index 4aaea998..d8c085f6 100644 --- a/src/merge/resources/ticketing/resources/scopes/client.py +++ b/src/merge/resources/ticketing/resources/scopes/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.common_model_scope_api import CommonModelScopeApi from ...types.individual_common_model_scope_deserializer_request import IndividualCommonModelScopeDeserializerRequest @@ -47,9 +47,9 @@ def default_scopes_retrieve( _response = self._client_wrapper.httpx_client.request( "ticketing/v1/default-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -84,9 +84,9 @@ def linked_account_scopes_retrieve( _response = self._client_wrapper.httpx_client.request( "ticketing/v1/linked-account-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -157,9 +157,9 @@ def linked_account_scopes_create( request_options=request_options, omit=OMIT, ) - if 200 <= 
_response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -188,20 +188,28 @@ async def default_scopes_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.scopes.default_scopes_retrieve() + + + async def main() -> None: + await client.ticketing.scopes.default_scopes_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/default-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -225,20 +233,28 @@ async def linked_account_scopes_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.scopes.linked_account_scopes_retrieve() + + + async def main() -> None: + await client.ticketing.scopes.linked_account_scopes_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/linked-account-scopes", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -268,6 +284,8 @@ async def linked_account_scopes_create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ticketing import ( IndividualCommonModelScopeDeserializerRequest, @@ -278,29 +296,35 @@ async def linked_account_scopes_create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.scopes.linked_account_scopes_create( - common_models=[ - IndividualCommonModelScopeDeserializerRequest( - model_name="Employee", - model_permissions={ - "READ": ModelPermissionDeserializerRequest( - is_enabled=True, - ), - "WRITE": ModelPermissionDeserializerRequest( - is_enabled=False, - ), - }, - ), - IndividualCommonModelScopeDeserializerRequest( - model_name="Benefit", - model_permissions={ - "WRITE": ModelPermissionDeserializerRequest( - is_enabled=False, - ) - }, - ), - ], - ) + + + async def main() -> None: + await client.ticketing.scopes.linked_account_scopes_create( + common_models=[ + IndividualCommonModelScopeDeserializerRequest( + model_name="Employee", + model_permissions={ + "READ": ModelPermissionDeserializerRequest( + is_enabled=True, + ), + "WRITE": ModelPermissionDeserializerRequest( + 
is_enabled=False, + ), + }, + ), + IndividualCommonModelScopeDeserializerRequest( + model_name="Benefit", + model_permissions={ + "WRITE": ModelPermissionDeserializerRequest( + is_enabled=False, + ) + }, + ), + ], + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/linked-account-scopes", @@ -309,9 +333,9 @@ async def linked_account_scopes_create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonModelScopeApi, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(CommonModelScopeApi, parse_obj_as(type_=CommonModelScopeApi, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/sync_status/client.py b/src/merge/resources/ticketing/resources/sync_status/client.py index c3b62e9b..8fe4da9e 100644 --- a/src/merge/resources/ticketing/resources/sync_status/client.py +++ b/src/merge/resources/ticketing/resources/sync_status/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_sync_status_list import PaginatedSyncStatusList @@ -56,9 +56,9 @@ def list( params={"cursor": cursor, "page_size": page_size}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedSyncStatusList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedSyncStatusList, parse_obj_as(type_=PaginatedSyncStatusList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -97,13 +97,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.sync_status.list() + + + async def main() -> None: + await client.ticketing.sync_status.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/sync-status", @@ -111,9 +119,9 @@ async def list( params={"cursor": cursor, "page_size": page_size}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedSyncStatusList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedSyncStatusList, parse_obj_as(type_=PaginatedSyncStatusList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/tags/client.py b/src/merge/resources/ticketing/resources/tags/client.py index 38ce25d6..6eb551f5 100644 --- a/src/merge/resources/ticketing/resources/tags/client.py +++ b/src/merge/resources/ticketing/resources/tags/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from 
.....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_tag_list import PaginatedTagList from ...types.tag import Tag @@ -98,9 +98,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTagList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTagList, parse_obj_as(type_=PaginatedTagList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -149,9 +149,9 @@ def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Tag, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Tag, parse_obj_as(type_=Tag, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -218,13 +218,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.tags.list() + + + async def main() -> None: + await client.ticketing.tags.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/tags", @@ -242,9 +250,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTagList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTagList, parse_obj_as(type_=PaginatedTagList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -277,15 +285,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.tags.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ticketing.tags.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ticketing/v1/tags/{jsonable_encoder(id)}", @@ -293,9 +309,9 @@ async def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Tag, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Tag, parse_obj_as(type_=Tag, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/teams/client.py b/src/merge/resources/ticketing/resources/teams/client.py index a9271250..40851e6f 100644 --- a/src/merge/resources/ticketing/resources/teams/client.py +++ b/src/merge/resources/ticketing/resources/teams/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from 
.....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_team_list import PaginatedTeamList from ...types.team import Team @@ -98,9 +98,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTeamList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTeamList, parse_obj_as(type_=PaginatedTeamList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -149,9 +149,9 @@ def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Team, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Team, parse_obj_as(type_=Team, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -218,13 +218,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.teams.list() + + + async def main() -> None: + await client.ticketing.teams.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/teams", @@ -242,9 +250,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTeamList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTeamList, parse_obj_as(type_=PaginatedTeamList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -277,15 +285,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.teams.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ticketing.teams.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ticketing/v1/teams/{jsonable_encoder(id)}", @@ -293,9 +309,9 @@ async def retrieve( params={"include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Team, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Team, parse_obj_as(type_=Team, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/tickets/client.py b/src/merge/resources/ticketing/resources/tickets/client.py index 3230df2c..11c8b86b 100644 --- a/src/merge/resources/ticketing/resources/tickets/client.py +++ b/src/merge/resources/ticketing/resources/tickets/client.py @@ -8,7 
+8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.meta_response import MetaResponse from ...types.paginated_remote_field_class_list import PaginatedRemoteFieldClassList @@ -239,9 +239,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTicketList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTicketList, parse_obj_as(type_=PaginatedTicketList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -297,9 +297,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TicketResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TicketResponse, parse_obj_as(type_=TicketResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -370,9 +370,9 @@ def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Ticket, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Ticket, parse_obj_as(type_=Ticket, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -432,9 +432,9 @@ def partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TicketResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TicketResponse, parse_obj_as(type_=TicketResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -505,9 +505,9 @@ def collaborators_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedUserList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedUserList, parse_obj_as(type_=PaginatedUserList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -544,9 +544,9 @@ def meta_patch_retrieve(self, id: str, *, request_options: typing.Optional[Reque _response = self._client_wrapper.httpx_client.request( f"ticketing/v1/tickets/meta/patch/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise 
ApiError(status_code=_response.status_code, body=_response.text) @@ -579,9 +579,9 @@ def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOptions] _response = self._client_wrapper.httpx_client.request( "ticketing/v1/tickets/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -642,9 +642,9 @@ def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -805,13 +805,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.tickets.list() + + + async def main() -> None: + await client.ticketing.tickets.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/tickets", @@ -858,9 +866,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedTicketList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedTicketList, parse_obj_as(type_=PaginatedTicketList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -897,6 +905,8 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ticketing import TicketRequest @@ -904,9 +914,15 @@ async def create( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.tickets.create( - model=TicketRequest(), - ) + + + async def main() -> None: + await client.ticketing.tickets.create( + model=TicketRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/tickets", @@ -916,9 +932,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TicketResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TicketResponse, parse_obj_as(type_=TicketResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -967,15 +983,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.tickets.retrieve( - id="id", - ) + + + async def main() -> None: + await 
client.ticketing.tickets.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ticketing/v1/tickets/{jsonable_encoder(id)}", @@ -989,9 +1013,9 @@ async def retrieve( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(Ticket, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(Ticket, parse_obj_as(type_=Ticket, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -1031,6 +1055,8 @@ async def partial_update( Examples -------- + import asyncio + from merge.client import AsyncMerge from merge.resources.ticketing import PatchedTicketRequest @@ -1038,10 +1064,16 @@ async def partial_update( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.tickets.partial_update( - id="id", - model=PatchedTicketRequest(), - ) + + + async def main() -> None: + await client.ticketing.tickets.partial_update( + id="id", + model=PatchedTicketRequest(), + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ticketing/v1/tickets/{jsonable_encoder(id)}", @@ -1051,9 +1083,9 @@ async def partial_update( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(TicketResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(TicketResponse, parse_obj_as(type_=TicketResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -1102,15 +1134,23 @@ async def collaborators_list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.tickets.collaborators_list( - parent_id="parent_id", - ) + + + async def main() -> None: + await client.ticketing.tickets.collaborators_list( + parent_id="parent_id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ticketing/v1/tickets/{jsonable_encoder(parent_id)}/collaborators", @@ -1124,9 +1164,9 @@ async def collaborators_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedUserList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedUserList, parse_obj_as(type_=PaginatedUserList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -1152,22 +1192,30 @@ async def meta_patch_retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.tickets.meta_patch_retrieve( - id="id", - ) + + + async def main() -> None: + await client.ticketing.tickets.meta_patch_retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ticketing/v1/tickets/meta/patch/{jsonable_encoder(id)}", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return 
pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -1189,20 +1237,28 @@ async def meta_post_retrieve(self, *, request_options: typing.Optional[RequestOp Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.tickets.meta_post_retrieve() + + + async def main() -> None: + await client.ticketing.tickets.meta_post_retrieve() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/tickets/meta/post", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(MetaResponse, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(MetaResponse, parse_obj_as(type_=MetaResponse, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -1244,13 +1300,21 @@ async def remote_field_classes_list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.tickets.remote_field_classes_list() + + + async def main() -> None: + await client.ticketing.tickets.remote_field_classes_list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/tickets/remote-field-classes", @@ -1263,9 +1327,9 @@ async def remote_field_classes_list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedRemoteFieldClassList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedRemoteFieldClassList, parse_obj_as(type_=PaginatedRemoteFieldClassList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/users/client.py b/src/merge/resources/ticketing/resources/users/client.py index 3f79530f..f2b83635 100644 --- a/src/merge/resources/ticketing/resources/users/client.py +++ b/src/merge/resources/ticketing/resources/users/client.py @@ -8,7 +8,7 @@ from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .....core.datetime_utils import serialize_datetime from .....core.jsonable_encoder import jsonable_encoder -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.paginated_user_list import PaginatedUserList from ...types.user import User @@ -110,9 +110,9 @@ def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedUserList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedUserList, parse_obj_as(type_=PaginatedUserList, object_=_response.json())) # type: ignore _response_json = _response.json() except 
JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -165,9 +165,9 @@ def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(User, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(User, parse_obj_as(type_=User, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -242,13 +242,21 @@ async def list( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.users.list() + + + async def main() -> None: + await client.ticketing.users.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/users", @@ -268,9 +276,9 @@ async def list( }, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(PaginatedUserList, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(PaginatedUserList, parse_obj_as(type_=PaginatedUserList, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -307,15 +315,23 @@ async def retrieve( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.users.retrieve( - id="id", - ) + + + async def main() -> None: + await client.ticketing.users.retrieve( + id="id", + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( f"ticketing/v1/users/{jsonable_encoder(id)}", @@ -323,9 +339,9 @@ async def retrieve( params={"expand": expand, "include_remote_data": include_remote_data}, request_options=request_options, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(User, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(User, parse_obj_as(type_=User, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/resources/webhook_receivers/client.py b/src/merge/resources/ticketing/resources/webhook_receivers/client.py index 69cc8a61..f54ef6bf 100644 --- a/src/merge/resources/ticketing/resources/webhook_receivers/client.py +++ b/src/merge/resources/ticketing/resources/webhook_receivers/client.py @@ -5,7 +5,7 @@ from .....core.api_error import ApiError from .....core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .....core.pydantic_utilities import pydantic_v1 +from .....core.pydantic_utilities import parse_obj_as from .....core.request_options import RequestOptions from ...types.webhook_receiver import WebhookReceiver @@ -44,9 +44,9 @@ def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> ty _response = self._client_wrapper.httpx_client.request( "ticketing/v1/webhook-receivers", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[WebhookReceiver], 
_response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[WebhookReceiver], parse_obj_as(type_=typing.List[WebhookReceiver], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -99,9 +99,9 @@ def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(WebhookReceiver, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(WebhookReceiver, parse_obj_as(type_=WebhookReceiver, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -128,20 +128,28 @@ async def list(self, *, request_options: typing.Optional[RequestOptions] = None) Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.webhook_receivers.list() + + + async def main() -> None: + await client.ticketing.webhook_receivers.list() + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/webhook-receivers", method="GET", request_options=request_options ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.List[WebhookReceiver], _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(typing.List[WebhookReceiver], parse_obj_as(type_=typing.List[WebhookReceiver], object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -176,16 +184,24 @@ async def create( Examples -------- + import asyncio + from merge.client import AsyncMerge client = AsyncMerge( account_token="YOUR_ACCOUNT_TOKEN", api_key="YOUR_API_KEY", ) - await client.ticketing.webhook_receivers.create( - event="event", - is_active=True, - ) + + + async def main() -> None: + await client.ticketing.webhook_receivers.create( + event="event", + is_active=True, + ) + + + asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( "ticketing/v1/webhook-receivers", @@ -194,9 +210,9 @@ async def create( request_options=request_options, omit=OMIT, ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(WebhookReceiver, _response.json()) # type: ignore try: + if 200 <= _response.status_code < 300: + return typing.cast(WebhookReceiver, parse_obj_as(type_=WebhookReceiver, object_=_response.json())) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/src/merge/resources/ticketing/types/account.py b/src/merge/resources/ticketing/types/account.py index d82079c6..728c08f5 100644 --- a/src/merge/resources/ticketing/types/account.py +++ b/src/merge/resources/ticketing/types/account.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData -class Account(pydantic_v1.BaseModel): +class Account(UniversalBaseModel): """ # The Account 
Object @@ -24,32 +25,32 @@ class Account(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The account's name. """ - domains: typing.Optional[typing.List[typing.Optional[str]]] = pydantic_v1.Field() + domains: typing.Optional[typing.List[typing.Optional[str]]] = pydantic.Field() """ The account's domain names. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -57,20 +58,11 @@ class Account(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/account_details.py b/src/merge/resources/ticketing/types/account_details.py index 81f9587b..58cd348a 100644 --- a/src/merge/resources/ticketing/types/account_details.py +++ b/src/merge/resources/ticketing/types/account_details.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .category_enum import CategoryEnum -class AccountDetails(pydantic_v1.BaseModel): +class AccountDetails(UniversalBaseModel): id: typing.Optional[str] integration: typing.Optional[str] integration_slug: typing.Optional[str] @@ -18,27 +18,18 @@ class AccountDetails(pydantic_v1.BaseModel): end_user_email_address: typing.Optional[str] status: typing.Optional[str] webhook_listener_url: typing.Optional[str] - is_duplicate: typing.Optional[bool] = pydantic_v1.Field() + is_duplicate: typing.Optional[bool] = pydantic.Field() """ Whether a Production Linked Account's credentials match another existing Production Linked Account. This field is `null` for Test Linked Accounts, incomplete Production Linked Accounts, and ignored duplicate Production Linked Account sets. """ account_type: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/account_details_and_actions.py b/src/merge/resources/ticketing/types/account_details_and_actions.py index 6a1b2874..ffc38f58 100644 --- a/src/merge/resources/ticketing/types/account_details_and_actions.py +++ b/src/merge/resources/ticketing/types/account_details_and_actions.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_details_and_actions_integration import AccountDetailsAndActionsIntegration from .account_details_and_actions_status_enum import AccountDetailsAndActionsStatusEnum from .category_enum import CategoryEnum -class AccountDetailsAndActions(pydantic_v1.BaseModel): +class AccountDetailsAndActions(UniversalBaseModel): """ # The LinkedAccount Object @@ -30,13 +30,13 @@ class AccountDetailsAndActions(pydantic_v1.BaseModel): end_user_origin_id: typing.Optional[str] end_user_organization_name: str end_user_email_address: str - subdomain: typing.Optional[str] = pydantic_v1.Field() + subdomain: typing.Optional[str] = pydantic.Field() """ The tenant or domain the customer has provided access to. 
""" webhook_listener_url: str - is_duplicate: typing.Optional[bool] = pydantic_v1.Field() + is_duplicate: typing.Optional[bool] = pydantic.Field() """ Whether a Production Linked Account's credentials match another existing Production Linked Account. This field is `null` for Test Linked Accounts, incomplete Production Linked Accounts, and ignored duplicate Production Linked Account sets. """ @@ -44,20 +44,11 @@ class AccountDetailsAndActions(pydantic_v1.BaseModel): integration: typing.Optional[AccountDetailsAndActionsIntegration] account_type: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/account_details_and_actions_integration.py b/src/merge/resources/ticketing/types/account_details_and_actions_integration.py index 7c300a2b..8aa63726 100644 --- a/src/merge/resources/ticketing/types/account_details_and_actions_integration.py +++ b/src/merge/resources/ticketing/types/account_details_and_actions_integration.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .categories_enum import CategoriesEnum from .model_operation import ModelOperation -class AccountDetailsAndActionsIntegration(pydantic_v1.BaseModel): +class AccountDetailsAndActionsIntegration(UniversalBaseModel): name: str categories: typing.List[CategoriesEnum] image: typing.Optional[str] @@ -19,20 +19,11 @@ class AccountDetailsAndActionsIntegration(pydantic_v1.BaseModel): passthrough_available: bool available_model_operations: typing.Optional[typing.List[ModelOperation]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/account_integration.py b/src/merge/resources/ticketing/types/account_integration.py index 57006e35..7a8a27ab 100644 --- a/src/merge/resources/ticketing/types/account_integration.py +++ b/src/merge/resources/ticketing/types/account_integration.py @@ -1,69 +1,60 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .categories_enum import CategoriesEnum -class AccountIntegration(pydantic_v1.BaseModel): - name: str = pydantic_v1.Field() +class AccountIntegration(UniversalBaseModel): + name: str = pydantic.Field() """ Company name. """ - categories: typing.Optional[typing.List[CategoriesEnum]] = pydantic_v1.Field() + categories: typing.Optional[typing.List[CategoriesEnum]] = pydantic.Field() """ Category or categories this integration belongs to. Multiple categories should be comma separated, i.e. [ats, hris]. """ - image: typing.Optional[str] = pydantic_v1.Field() + image: typing.Optional[str] = pydantic.Field() """ Company logo in rectangular shape. Upload an image with a clear background. """ - square_image: typing.Optional[str] = pydantic_v1.Field() + square_image: typing.Optional[str] = pydantic.Field() """ Company logo in square shape. Upload an image with a white background. """ - color: typing.Optional[str] = pydantic_v1.Field() + color: typing.Optional[str] = pydantic.Field() """ The color of this integration used for buttons and text throughout the app and landing pages. Choose a darker, saturated color. 
""" slug: typing.Optional[str] - api_endpoints_to_documentation_urls: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + api_endpoints_to_documentation_urls: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ Mapping of API endpoints to documentation urls for support. Example: {'GET': [['/common-model-scopes', 'https://docs.merge.dev/accounting/common-model-scopes/#common_model_scopes_retrieve'],['/common-model-actions', 'https://docs.merge.dev/accounting/common-model-actions/#common_model_actions_retrieve']], 'POST': []} """ - webhook_setup_guide_url: typing.Optional[str] = pydantic_v1.Field() + webhook_setup_guide_url: typing.Optional[str] = pydantic.Field() """ Setup guide URL for third party webhook creation. Exposed in Merge Docs. """ - category_beta_status: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + category_beta_status: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ Category or categories this integration is in beta status for. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/account_token.py b/src/merge/resources/ticketing/types/account_token.py index 4794fea3..c280c7cb 100644 --- a/src/merge/resources/ticketing/types/account_token.py +++ b/src/merge/resources/ticketing/types/account_token.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_integration import AccountIntegration -class AccountToken(pydantic_v1.BaseModel): +class AccountToken(UniversalBaseModel): account_token: str integration: AccountIntegration - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/advanced_metadata.py b/src/merge/resources/ticketing/types/advanced_metadata.py index 2aa7d1d6..5b0ec9e2 100644 --- a/src/merge/resources/ticketing/types/advanced_metadata.py +++ b/src/merge/resources/ticketing/types/advanced_metadata.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class AdvancedMetadata(pydantic_v1.BaseModel): + +class AdvancedMetadata(UniversalBaseModel): id: str display_name: typing.Optional[str] description: typing.Optional[str] @@ -15,20 +15,11 @@ class AdvancedMetadata(pydantic_v1.BaseModel): is_custom: typing.Optional[bool] field_choices: typing.Optional[typing.List[typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/async_passthrough_reciept.py b/src/merge/resources/ticketing/types/async_passthrough_reciept.py index 2cc33210..f2144443 100644 --- a/src/merge/resources/ticketing/types/async_passthrough_reciept.py +++ b/src/merge/resources/ticketing/types/async_passthrough_reciept.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class AsyncPassthroughReciept(pydantic_v1.BaseModel): - async_passthrough_receipt_id: str - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class AsyncPassthroughReciept(UniversalBaseModel): + async_passthrough_receipt_id: str - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/attachment.py b/src/merge/resources/ticketing/types/attachment.py index 7bff1381..c45eee70 100644 --- a/src/merge/resources/ticketing/types/attachment.py +++ b/src/merge/resources/ticketing/types/attachment.py @@ -5,12 +5,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel, update_forward_refs from .remote_data import RemoteData -class Attachment(pydantic_v1.BaseModel): +class Attachment(UniversalBaseModel): """ # The Attachment Object @@ -24,47 +25,47 @@ class Attachment(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - file_name: typing.Optional[str] = pydantic_v1.Field() + file_name: typing.Optional[str] = pydantic.Field() """ The attachment's name. It is required to include the file extension in the attachment's name. """ - ticket: typing.Optional[AttachmentTicket] = pydantic_v1.Field() + ticket: typing.Optional[AttachmentTicket] = pydantic.Field() """ The ticket associated with the attachment. """ - file_url: typing.Optional[str] = pydantic_v1.Field() + file_url: typing.Optional[str] = pydantic.Field() """ The attachment's url. It is required to include the file extension in the file's URL. """ - content_type: typing.Optional[str] = pydantic_v1.Field() + content_type: typing.Optional[str] = pydantic.Field() """ The attachment's file format. 
""" - uploaded_by: typing.Optional[str] = pydantic_v1.Field() + uploaded_by: typing.Optional[str] = pydantic.Field() """ The user who uploaded the attachment. """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's attachment was created. """ @@ -73,25 +74,16 @@ class Attachment(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow from .attachment_ticket import AttachmentTicket # noqa: E402 -Attachment.update_forward_refs() +update_forward_refs(Attachment) diff --git a/src/merge/resources/ticketing/types/attachment_request.py b/src/merge/resources/ticketing/types/attachment_request.py index e859c014..f499908a 100644 --- a/src/merge/resources/ticketing/types/attachment_request.py +++ b/src/merge/resources/ticketing/types/attachment_request.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .attachment_request_ticket import AttachmentRequestTicket -class AttachmentRequest(pydantic_v1.BaseModel): +class AttachmentRequest(UniversalBaseModel): """ # The Attachment Object @@ -21,27 +21,27 @@ class AttachmentRequest(pydantic_v1.BaseModel): TODO """ - file_name: typing.Optional[str] = pydantic_v1.Field() + file_name: typing.Optional[str] = pydantic.Field() """ The attachment's name. It is required to include the file extension in the attachment's name. """ - ticket: typing.Optional[AttachmentRequestTicket] = pydantic_v1.Field() + ticket: typing.Optional[AttachmentRequestTicket] = pydantic.Field() """ The ticket associated with the attachment. """ - file_url: typing.Optional[str] = pydantic_v1.Field() + file_url: typing.Optional[str] = pydantic.Field() """ The attachment's url. It is required to include the file extension in the file's URL. """ - content_type: typing.Optional[str] = pydantic_v1.Field() + content_type: typing.Optional[str] = pydantic.Field() """ The attachment's file format. """ - uploaded_by: typing.Optional[str] = pydantic_v1.Field() + uploaded_by: typing.Optional[str] = pydantic.Field() """ The user who uploaded the attachment. 
""" @@ -49,20 +49,11 @@ class AttachmentRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/audit_log_event.py b/src/merge/resources/ticketing/types/audit_log_event.py index 43be24a5..c0b4e43a 100644 --- a/src/merge/resources/ticketing/types/audit_log_event.py +++ b/src/merge/resources/ticketing/types/audit_log_event.py @@ -3,25 +3,26 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .audit_log_event_event_type import AuditLogEventEventType from .audit_log_event_role import AuditLogEventRole -class AuditLogEvent(pydantic_v1.BaseModel): +class AuditLogEvent(UniversalBaseModel): id: typing.Optional[str] - user_name: typing.Optional[str] = pydantic_v1.Field() + user_name: typing.Optional[str] = pydantic.Field() """ The User's full name at the time of this Event occurring. """ - user_email: typing.Optional[str] = pydantic_v1.Field() + user_email: typing.Optional[str] = pydantic.Field() """ The User's email at the time of this Event occurring. """ - role: AuditLogEventRole = pydantic_v1.Field() + role: AuditLogEventRole = pydantic.Field() """ Designates the role of the user (or SYSTEM/API if action not taken by a user) at the time of this Event occurring. @@ -34,7 +35,7 @@ class AuditLogEvent(pydantic_v1.BaseModel): """ ip_address: str - event_type: AuditLogEventEventType = pydantic_v1.Field() + event_type: AuditLogEventEventType = pydantic.Field() """ Designates the type of event that occurred. 
@@ -80,20 +81,11 @@ class AuditLogEvent(pydantic_v1.BaseModel): event_description: str created_at: typing.Optional[dt.datetime] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/available_actions.py b/src/merge/resources/ticketing/types/available_actions.py index bbd94581..1f1d424c 100644 --- a/src/merge/resources/ticketing/types/available_actions.py +++ b/src/merge/resources/ticketing/types/available_actions.py @@ -1,15 +1,15 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_integration import AccountIntegration from .model_operation import ModelOperation -class AvailableActions(pydantic_v1.BaseModel): +class AvailableActions(UniversalBaseModel): """ # The AvailableActions Object @@ -26,20 +26,11 @@ class AvailableActions(pydantic_v1.BaseModel): passthrough_available: bool available_model_operations: typing.Optional[typing.List[ModelOperation]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/collection.py b/src/merge/resources/ticketing/types/collection.py index 86d10b86..5327c02a 100644 --- a/src/merge/resources/ticketing/types/collection.py +++ b/src/merge/resources/ticketing/types/collection.py @@ -5,14 +5,15 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import 
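Each model in this patch drops the same pair of `json()`/`dict()` overrides, which serialized with `by_alias=True` and deep-unioned the `exclude_unset` and `exclude_none` views via `deep_union_pydantic_dicts`. That behaviour presumably moves into `UniversalBaseModel`. As a reference point for reviewers, one plausible reading of the removed helper, written against plain dicts, is sketched below; the conflict-resolution order (explicitly-set view wins) is an assumption.

import typing

def deep_union_dicts(
    exclude_unset_view: typing.Dict[str, typing.Any],
    exclude_none_view: typing.Dict[str, typing.Any],
) -> typing.Dict[str, typing.Any]:
    # Keep a key if it was either explicitly set or non-None, merging nested dicts recursively.
    merged = dict(exclude_unset_view)
    for key, value in exclude_none_view.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = deep_union_dicts(merged[key], value)
        else:
            merged.setdefault(key, value)
    return merged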
deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel, update_forward_refs from .collection_access_level import CollectionAccessLevel from .collection_collection_type import CollectionCollectionType from .remote_data import RemoteData -class Collection(pydantic_v1.BaseModel): +class Collection(UniversalBaseModel): """ # The Collection Object @@ -27,32 +28,32 @@ class Collection(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The collection's name. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The collection's description. """ - collection_type: typing.Optional[CollectionCollectionType] = pydantic_v1.Field() + collection_type: typing.Optional[CollectionCollectionType] = pydantic.Field() """ The collection's type. @@ -60,17 +61,17 @@ class Collection(pydantic_v1.BaseModel): - `PROJECT` - PROJECT """ - parent_collection: typing.Optional[CollectionParentCollection] = pydantic_v1.Field() + parent_collection: typing.Optional[CollectionParentCollection] = pydantic.Field() """ The parent collection for this collection. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ - access_level: typing.Optional[CollectionAccessLevel] = pydantic_v1.Field() + access_level: typing.Optional[CollectionAccessLevel] = pydantic.Field() """ The level of access a User has to the Collection and its sub-objects. 
@@ -82,25 +83,16 @@ class Collection(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow from .collection_parent_collection import CollectionParentCollection # noqa: E402 -Collection.update_forward_refs() +update_forward_refs(Collection) diff --git a/src/merge/resources/ticketing/types/comment.py b/src/merge/resources/ticketing/types/comment.py index 1640edee..9a408b8d 100644 --- a/src/merge/resources/ticketing/types/comment.py +++ b/src/merge/resources/ticketing/types/comment.py @@ -3,15 +3,16 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .comment_contact import CommentContact from .comment_ticket import CommentTicket from .comment_user import CommentUser from .remote_data import RemoteData -class Comment(pydantic_v1.BaseModel): +class Comment(UniversalBaseModel): """ # The Comment Object @@ -25,52 +26,52 @@ class Comment(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - user: typing.Optional[CommentUser] = pydantic_v1.Field() + user: typing.Optional[CommentUser] = pydantic.Field() """ The author of the Comment, if the author is a User. """ - contact: typing.Optional[CommentContact] = pydantic_v1.Field() + contact: typing.Optional[CommentContact] = pydantic.Field() """ The author of the Comment, if the author is a Contact. """ - body: typing.Optional[str] = pydantic_v1.Field() + body: typing.Optional[str] = pydantic.Field() """ The comment's text body. """ - html_body: typing.Optional[str] = pydantic_v1.Field() + html_body: typing.Optional[str] = pydantic.Field() """ The comment's text body formatted as html. 
""" - ticket: typing.Optional[CommentTicket] = pydantic_v1.Field() + ticket: typing.Optional[CommentTicket] = pydantic.Field() """ The ticket associated with the comment. """ - is_private: typing.Optional[bool] = pydantic_v1.Field() + is_private: typing.Optional[bool] = pydantic.Field() """ Whether or not the comment is internal. """ - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's comment was created. """ @@ -79,20 +80,11 @@ class Comment(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/comment_request.py b/src/merge/resources/ticketing/types/comment_request.py index 77e0cb8e..0b3e7a62 100644 --- a/src/merge/resources/ticketing/types/comment_request.py +++ b/src/merge/resources/ticketing/types/comment_request.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .comment_request_contact import CommentRequestContact from .comment_request_ticket import CommentRequestTicket from .comment_request_user import CommentRequestUser -class CommentRequest(pydantic_v1.BaseModel): +class CommentRequest(UniversalBaseModel): """ # The Comment Object @@ -23,32 +23,32 @@ class CommentRequest(pydantic_v1.BaseModel): TODO """ - user: typing.Optional[CommentRequestUser] = pydantic_v1.Field() + user: typing.Optional[CommentRequestUser] = pydantic.Field() """ The author of the Comment, if the author is a User. """ - contact: typing.Optional[CommentRequestContact] = pydantic_v1.Field() + contact: typing.Optional[CommentRequestContact] = pydantic.Field() """ The author of the Comment, if the author is a Contact. """ - body: typing.Optional[str] = pydantic_v1.Field() + body: typing.Optional[str] = pydantic.Field() """ The comment's text body. """ - html_body: typing.Optional[str] = pydantic_v1.Field() + html_body: typing.Optional[str] = pydantic.Field() """ The comment's text body formatted as html. """ - ticket: typing.Optional[CommentRequestTicket] = pydantic_v1.Field() + ticket: typing.Optional[CommentRequestTicket] = pydantic.Field() """ The ticket associated with the comment. 
""" - is_private: typing.Optional[bool] = pydantic_v1.Field() + is_private: typing.Optional[bool] = pydantic.Field() """ Whether or not the comment is internal. """ @@ -56,20 +56,11 @@ class CommentRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/comment_response.py b/src/merge/resources/ticketing/types/comment_response.py index 5396f0fb..0619fed8 100644 --- a/src/merge/resources/ticketing/types/comment_response.py +++ b/src/merge/resources/ticketing/types/comment_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .comment import Comment from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .warning_validation_problem import WarningValidationProblem -class CommentResponse(pydantic_v1.BaseModel): +class CommentResponse(UniversalBaseModel): model: Comment warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/common_model_scope_api.py 
b/src/merge/resources/ticketing/types/common_model_scope_api.py index d6fdec56..093001ba 100644 --- a/src/merge/resources/ticketing/types/common_model_scope_api.py +++ b/src/merge/resources/ticketing/types/common_model_scope_api.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .individual_common_model_scope_deserializer import IndividualCommonModelScopeDeserializer -class CommonModelScopeApi(pydantic_v1.BaseModel): - common_models: typing.List[IndividualCommonModelScopeDeserializer] = pydantic_v1.Field() +class CommonModelScopeApi(UniversalBaseModel): + common_models: typing.List[IndividualCommonModelScopeDeserializer] = pydantic.Field() """ The common models you want to update the scopes for """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/common_model_scopes_body_request.py b/src/merge/resources/ticketing/types/common_model_scopes_body_request.py index e956cbdc..9098f031 100644 --- a/src/merge/resources/ticketing/types/common_model_scopes_body_request.py +++ b/src/merge/resources/ticketing/types/common_model_scopes_body_request.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .enabled_actions_enum import EnabledActionsEnum -class CommonModelScopesBodyRequest(pydantic_v1.BaseModel): +class CommonModelScopesBodyRequest(UniversalBaseModel): model_id: str enabled_actions: typing.List[EnabledActionsEnum] disabled_fields: typing.List[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/contact.py b/src/merge/resources/ticketing/types/contact.py index 1ae431ab..754889d3 100644 --- a/src/merge/resources/ticketing/types/contact.py +++ b/src/merge/resources/ticketing/types/contact.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .contact_account import ContactAccount from .remote_data import RemoteData -class Contact(pydantic_v1.BaseModel): +class Contact(UniversalBaseModel): """ # The Contact Object @@ -23,47 +24,47 @@ class Contact(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The contact's name. """ - email_address: typing.Optional[str] = pydantic_v1.Field() + email_address: typing.Optional[str] = pydantic.Field() """ The contact's email address. """ - phone_number: typing.Optional[str] = pydantic_v1.Field() + phone_number: typing.Optional[str] = pydantic.Field() """ The contact's phone number. """ - details: typing.Optional[str] = pydantic_v1.Field() + details: typing.Optional[str] = pydantic.Field() """ The contact's details. 
""" - account: typing.Optional[ContactAccount] = pydantic_v1.Field() + account: typing.Optional[ContactAccount] = pydantic.Field() """ The contact's account. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -71,20 +72,11 @@ class Contact(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/contact_request.py b/src/merge/resources/ticketing/types/contact_request.py index a71e7ab8..fcdcdf41 100644 --- a/src/merge/resources/ticketing/types/contact_request.py +++ b/src/merge/resources/ticketing/types/contact_request.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .contact_request_account import ContactRequestAccount -class ContactRequest(pydantic_v1.BaseModel): +class ContactRequest(UniversalBaseModel): """ # The Contact Object @@ -21,27 +21,27 @@ class ContactRequest(pydantic_v1.BaseModel): TODO """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The contact's name. """ - email_address: typing.Optional[str] = pydantic_v1.Field() + email_address: typing.Optional[str] = pydantic.Field() """ The contact's email address. """ - phone_number: typing.Optional[str] = pydantic_v1.Field() + phone_number: typing.Optional[str] = pydantic.Field() """ The contact's phone number. """ - details: typing.Optional[str] = pydantic_v1.Field() + details: typing.Optional[str] = pydantic.Field() """ The contact's details. """ - account: typing.Optional[ContactRequestAccount] = pydantic_v1.Field() + account: typing.Optional[ContactRequestAccount] = pydantic.Field() """ The contact's account. 
""" @@ -49,20 +49,11 @@ class ContactRequest(pydantic_v1.BaseModel): integration_params: typing.Optional[typing.Dict[str, typing.Any]] linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/data_passthrough_request.py b/src/merge/resources/ticketing/types/data_passthrough_request.py index fdf9b18c..3f9ee090 100644 --- a/src/merge/resources/ticketing/types/data_passthrough_request.py +++ b/src/merge/resources/ticketing/types/data_passthrough_request.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .method_enum import MethodEnum from .multipart_form_field_request import MultipartFormFieldRequest from .request_format_enum import RequestFormatEnum -class DataPassthroughRequest(pydantic_v1.BaseModel): +class DataPassthroughRequest(UniversalBaseModel): """ # The DataPassthrough Object @@ -24,51 +24,42 @@ class DataPassthroughRequest(pydantic_v1.BaseModel): """ method: MethodEnum - path: str = pydantic_v1.Field() + path: str = pydantic.Field() """ The path of the request in the third party's platform. """ - base_url_override: typing.Optional[str] = pydantic_v1.Field() + base_url_override: typing.Optional[str] = pydantic.Field() """ An optional override of the third party's base url for the request. """ - data: typing.Optional[str] = pydantic_v1.Field() + data: typing.Optional[str] = pydantic.Field() """ The data with the request. You must include a `request_format` parameter matching the data's format """ - multipart_form_data: typing.Optional[typing.List[MultipartFormFieldRequest]] = pydantic_v1.Field() + multipart_form_data: typing.Optional[typing.List[MultipartFormFieldRequest]] = pydantic.Field() """ Pass an array of `MultipartFormField` objects in here instead of using the `data` param if `request_format` is set to `MULTIPART`. """ - headers: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field() + headers: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field() """ The headers to use for the request (Merge will handle the account's authorization headers). `Content-Type` header is required for passthrough. Choose content type corresponding to expected format of receiving server. 
""" request_format: typing.Optional[RequestFormatEnum] - normalize_response: typing.Optional[bool] = pydantic_v1.Field() + normalize_response: typing.Optional[bool] = pydantic.Field() """ Optional. If true, the response will always be an object of the form `{"type": T, "value": ...}` where `T` will be one of `string, boolean, number, null, array, object`. """ - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/debug_mode_log.py b/src/merge/resources/ticketing/types/debug_mode_log.py index 321c9090..8edea2ae 100644 --- a/src/merge/resources/ticketing/types/debug_mode_log.py +++ b/src/merge/resources/ticketing/types/debug_mode_log.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_model_log_summary import DebugModelLogSummary -class DebugModeLog(pydantic_v1.BaseModel): +class DebugModeLog(UniversalBaseModel): log_id: str dashboard_view: str log_summary: DebugModelLogSummary - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/debug_model_log_summary.py b/src/merge/resources/ticketing/types/debug_model_log_summary.py index 06bb154b..4f5b07db 100644 --- a/src/merge/resources/ticketing/types/debug_model_log_summary.py +++ b/src/merge/resources/ticketing/types/debug_model_log_summary.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our 
API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class DebugModelLogSummary(pydantic_v1.BaseModel): + +class DebugModelLogSummary(UniversalBaseModel): url: str method: str status_code: int - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/error_validation_problem.py b/src/merge/resources/ticketing/types/error_validation_problem.py index 425af45c..3838491d 100644 --- a/src/merge/resources/ticketing/types/error_validation_problem.py +++ b/src/merge/resources/ticketing/types/error_validation_problem.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
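`DebugModelLogSummary` above is one of the simplest migrated models, so it is a convenient place to show what `extra="allow"` and `frozen=True` mean for callers on either Pydantic major. The import path, the example payload values, and the parse-method dispatch below are assumptions about how the published package exposes the type, not part of this patch.

import pydantic
from merge.resources.ticketing.types import DebugModelLogSummary  # import path assumed

payload = {
    "url": "https://api.merge.dev/api/ticketing/v1/tickets",  # example values only
    "method": "GET",
    "status_code": 200,
    "trace_id": "abc-123",  # unknown key is kept because extra fields are allowed
}

summary = (
    DebugModelLogSummary.model_validate(payload)       # Pydantic v2
    if hasattr(DebugModelLogSummary, "model_validate")
    else DebugModelLogSummary.parse_obj(payload)        # Pydantic v1
)

try:
    summary.status_code = 500  # frozen=True: instances are immutable on both majors
except (TypeError, pydantic.ValidationError):
    pass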
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .validation_problem_source import ValidationProblemSource -class ErrorValidationProblem(pydantic_v1.BaseModel): +class ErrorValidationProblem(UniversalBaseModel): source: typing.Optional[ValidationProblemSource] title: str detail: str problem_type: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/external_target_field_api.py b/src/merge/resources/ticketing/types/external_target_field_api.py index a97d536a..8a971c64 100644 --- a/src/merge/resources/ticketing/types/external_target_field_api.py +++ b/src/merge/resources/ticketing/types/external_target_field_api.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ExternalTargetFieldApi(pydantic_v1.BaseModel): + +class ExternalTargetFieldApi(UniversalBaseModel): name: typing.Optional[str] description: typing.Optional[str] is_mapped: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/external_target_field_api_response.py b/src/merge/resources/ticketing/types/external_target_field_api_response.py index a7edc1b3..03bec8c6 100644 --- a/src/merge/resources/ticketing/types/external_target_field_api_response.py +++ b/src/merge/resources/ticketing/types/external_target_field_api_response.py @@ -1,42 +1,31 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .external_target_field_api import ExternalTargetFieldApi - - -class ExternalTargetFieldApiResponse(pydantic_v1.BaseModel): - ticket: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Ticket") - comment: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Comment") - project: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Project") - collection: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Collection") - user: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="User") - role: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Role") - account: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Account") - team: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Team") - attachment: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Attachment") - tag: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Tag") - contact: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic_v1.Field(alias="Contact") +import pydantic - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .external_target_field_api import ExternalTargetFieldApi - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} +class ExternalTargetFieldApiResponse(UniversalBaseModel): + ticket: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Ticket") + comment: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Comment") + project: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Project") + collection: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Collection") + user: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="User") + role: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Role") + account: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Account") + team: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Team") + attachment: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Attachment") + tag: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Tag") + contact: typing.Optional[typing.List[ExternalTargetFieldApi]] = pydantic.Field(alias="Contact") + + if IS_PYDANTIC_V2: + model_config: 
typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/field_mapping_api_instance.py b/src/merge/resources/ticketing/types/field_mapping_api_instance.py index d9d7670d..8af85a52 100644 --- a/src/merge/resources/ticketing/types/field_mapping_api_instance.py +++ b/src/merge/resources/ticketing/types/field_mapping_api_instance.py @@ -1,34 +1,25 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_mapping_api_instance_remote_field import FieldMappingApiInstanceRemoteField from .field_mapping_api_instance_target_field import FieldMappingApiInstanceTargetField -class FieldMappingApiInstance(pydantic_v1.BaseModel): +class FieldMappingApiInstance(UniversalBaseModel): id: typing.Optional[str] is_integration_wide: typing.Optional[bool] target_field: typing.Optional[FieldMappingApiInstanceTargetField] remote_field: typing.Optional[FieldMappingApiInstanceRemoteField] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/field_mapping_api_instance_remote_field.py b/src/merge/resources/ticketing/types/field_mapping_api_instance_remote_field.py index 9539d3dc..0635b76f 100644 --- a/src/merge/resources/ticketing/types/field_mapping_api_instance_remote_field.py +++ b/src/merge/resources/ticketing/types/field_mapping_api_instance_remote_field.py @@ -1,36 +1,25 @@ # This file was auto-generated by Fern from our API Definition. 
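`ExternalTargetFieldApiResponse` above (and `FieldMappingApiInstanceResponse` further on) keeps the API's PascalCase keys such as `"Ticket"` and `"Comment"` while exposing snake_case attributes via `pydantic.Field(alias=...)`. The self-contained stand-in below is hypothetical, not an SDK model; it only illustrates the alias mechanics that apply to those response models on either Pydantic major.

import typing

import pydantic

class AliasDemo(pydantic.BaseModel):
    # Stand-in for one aliased field of ExternalTargetFieldApiResponse.
    ticket: typing.Optional[typing.List[str]] = pydantic.Field(default=None, alias="Ticket")

demo = AliasDemo(**{"Ticket": ["remote-123"]})  # the constructor accepts the alias on v1 and v2
print(demo.ticket)                              # ['remote-123']
print(demo.dict(by_alias=True))                 # {'Ticket': ['remote-123']} (v1 spelling; v2 prefers model_dump)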
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_mapping_api_instance_remote_field_remote_endpoint_info import ( FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo, ) -class FieldMappingApiInstanceRemoteField(pydantic_v1.BaseModel): +class FieldMappingApiInstanceRemoteField(UniversalBaseModel): remote_key_name: str - schema_: typing.Dict[str, typing.Any] = pydantic_v1.Field(alias="schema") + schema_: typing.Dict[str, typing.Any] = pydantic.Field(alias="schema") remote_endpoint_info: FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py b/src/merge/resources/ticketing/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py index d9fcc276..e34eb6e4 100644 --- a/src/merge/resources/ticketing/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py +++ b/src/merge/resources/ticketing/types/field_mapping_api_instance_remote_field_remote_endpoint_info.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo(pydantic_v1.BaseModel): + +class FieldMappingApiInstanceRemoteFieldRemoteEndpointInfo(UniversalBaseModel): method: typing.Optional[str] url_path: typing.Optional[str] field_traversal_path: typing.Optional[typing.List[str]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/field_mapping_api_instance_response.py b/src/merge/resources/ticketing/types/field_mapping_api_instance_response.py index 3386985e..ed87c667 100644 --- a/src/merge/resources/ticketing/types/field_mapping_api_instance_response.py +++ b/src/merge/resources/ticketing/types/field_mapping_api_instance_response.py @@ -1,42 +1,31 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .field_mapping_api_instance import FieldMappingApiInstance - - -class FieldMappingApiInstanceResponse(pydantic_v1.BaseModel): - ticket: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Ticket") - comment: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Comment") - project: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Project") - collection: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Collection") - user: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="User") - role: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Role") - account: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Account") - team: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Team") - attachment: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Attachment") - tag: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Tag") - contact: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic_v1.Field(alias="Contact") +import pydantic - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .field_mapping_api_instance import FieldMappingApiInstance - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} +class FieldMappingApiInstanceResponse(UniversalBaseModel): + ticket: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Ticket") + comment: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Comment") + project: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Project") + collection: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Collection") + user: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="User") + role: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Role") + account: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Account") + team: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Team") + attachment: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Attachment") + tag: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Tag") + contact: typing.Optional[typing.List[FieldMappingApiInstance]] = pydantic.Field(alias="Contact") + + if IS_PYDANTIC_V2: + model_config: 
typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/field_mapping_api_instance_target_field.py b/src/merge/resources/ticketing/types/field_mapping_api_instance_target_field.py index 25a8dcff..c590d4ce 100644 --- a/src/merge/resources/ticketing/types/field_mapping_api_instance_target_field.py +++ b/src/merge/resources/ticketing/types/field_mapping_api_instance_target_field.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldMappingApiInstanceTargetField(pydantic_v1.BaseModel): + +class FieldMappingApiInstanceTargetField(UniversalBaseModel): name: str description: str is_organization_wide: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/field_mapping_instance_response.py b/src/merge/resources/ticketing/types/field_mapping_instance_response.py index b55d2c40..aaf06f0e 100644 --- a/src/merge/resources/ticketing/types/field_mapping_instance_response.py +++ b/src/merge/resources/ticketing/types/field_mapping_instance_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
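The FieldMappingApiInstanceResponse hunk above keeps the capitalised wire aliases (alias="Ticket", alias="Comment", and so on) while switching base classes. For readers unfamiliar with the pattern, here is a small, self-contained illustration of how aliased fields behave in plain pydantic; the Mapping model and its field are hypothetical and are not the generated SDK class.

import typing
import pydantic

class Mapping(pydantic.BaseModel):
    # The Python attribute is lower-case; the JSON key on the wire is "Ticket".
    ticket: typing.Optional[typing.List[str]] = pydantic.Field(default=None, alias="Ticket")

m = Mapping.parse_obj({"Ticket": ["TICKET-1", "TICKET-2"]})  # v1-style spelling; v2 also offers model_validate
print(m.ticket)               # ['TICKET-1', 'TICKET-2']
print(m.dict(by_alias=True))  # {'Ticket': ['TICKET-1', 'TICKET-2']}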
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .field_mapping_api_instance import FieldMappingApiInstance from .warning_validation_problem import WarningValidationProblem -class FieldMappingInstanceResponse(pydantic_v1.BaseModel): +class FieldMappingInstanceResponse(UniversalBaseModel): model: FieldMappingApiInstance warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/field_permission_deserializer.py b/src/merge/resources/ticketing/types/field_permission_deserializer.py index 124f3deb..ed80b9d6 100644 --- a/src/merge/resources/ticketing/types/field_permission_deserializer.py +++ b/src/merge/resources/ticketing/types/field_permission_deserializer.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldPermissionDeserializer(pydantic_v1.BaseModel): + +class FieldPermissionDeserializer(UniversalBaseModel): enabled: typing.Optional[typing.List[typing.Any]] disabled: typing.Optional[typing.List[typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/field_permission_deserializer_request.py b/src/merge/resources/ticketing/types/field_permission_deserializer_request.py index 65e80e75..e937e743 100644 --- a/src/merge/resources/ticketing/types/field_permission_deserializer_request.py +++ b/src/merge/resources/ticketing/types/field_permission_deserializer_request.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class FieldPermissionDeserializerRequest(pydantic_v1.BaseModel): + +class FieldPermissionDeserializerRequest(UniversalBaseModel): enabled: typing.Optional[typing.List[typing.Any]] disabled: typing.Optional[typing.List[typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/individual_common_model_scope_deserializer.py b/src/merge/resources/ticketing/types/individual_common_model_scope_deserializer.py index d80ca06e..ffa55055 100644 --- a/src/merge/resources/ticketing/types/individual_common_model_scope_deserializer.py +++ b/src/merge/resources/ticketing/types/individual_common_model_scope_deserializer.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_permission_deserializer import FieldPermissionDeserializer from .model_permission_deserializer import ModelPermissionDeserializer -class IndividualCommonModelScopeDeserializer(pydantic_v1.BaseModel): +class IndividualCommonModelScopeDeserializer(UniversalBaseModel): model_name: str model_permissions: typing.Optional[typing.Dict[str, ModelPermissionDeserializer]] field_permissions: typing.Optional[FieldPermissionDeserializer] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/individual_common_model_scope_deserializer_request.py b/src/merge/resources/ticketing/types/individual_common_model_scope_deserializer_request.py index 8f2e7de5..d0e68f6d 100644 --- a/src/merge/resources/ticketing/types/individual_common_model_scope_deserializer_request.py +++ b/src/merge/resources/ticketing/types/individual_common_model_scope_deserializer_request.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .field_permission_deserializer_request import FieldPermissionDeserializerRequest from .model_permission_deserializer_request import ModelPermissionDeserializerRequest -class IndividualCommonModelScopeDeserializerRequest(pydantic_v1.BaseModel): +class IndividualCommonModelScopeDeserializerRequest(UniversalBaseModel): model_name: str model_permissions: typing.Optional[typing.Dict[str, ModelPermissionDeserializerRequest]] field_permissions: typing.Optional[FieldPermissionDeserializerRequest] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/issue.py b/src/merge/resources/ticketing/types/issue.py index 086a0db2..28366731 100644 --- a/src/merge/resources/ticketing/types/issue.py +++ b/src/merge/resources/ticketing/types/issue.py @@ -3,14 +3,15 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .issue_status import IssueStatus -class Issue(pydantic_v1.BaseModel): +class Issue(UniversalBaseModel): id: typing.Optional[str] - status: typing.Optional[IssueStatus] = pydantic_v1.Field() + status: typing.Optional[IssueStatus] = pydantic.Field() """ Status of the issue. 
Options: ('ONGOING', 'RESOLVED') @@ -25,20 +26,11 @@ class Issue(pydantic_v1.BaseModel): is_muted: typing.Optional[bool] error_details: typing.Optional[typing.List[str]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/item_schema.py b/src/merge/resources/ticketing/types/item_schema.py index 45059fd0..65f321ef 100644 --- a/src/merge/resources/ticketing/types/item_schema.py +++ b/src/merge/resources/ticketing/types/item_schema.py @@ -1,33 +1,24 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .item_format_enum import ItemFormatEnum from .item_type_enum import ItemTypeEnum -class ItemSchema(pydantic_v1.BaseModel): +class ItemSchema(UniversalBaseModel): item_type: typing.Optional[ItemTypeEnum] item_format: typing.Optional[ItemFormatEnum] item_choices: typing.Optional[typing.List[str]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/link_token.py b/src/merge/resources/ticketing/types/link_token.py index 1c82d1ac..87c88faf 100644 --- a/src/merge/resources/ticketing/types/link_token.py +++ b/src/merge/resources/ticketing/types/link_token.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class LinkToken(pydantic_v1.BaseModel): + +class LinkToken(UniversalBaseModel): link_token: str integration_name: typing.Optional[str] magic_link_url: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/linked_account_status.py b/src/merge/resources/ticketing/types/linked_account_status.py index 60e21a98..34184012 100644 --- a/src/merge/resources/ticketing/types/linked_account_status.py +++ b/src/merge/resources/ticketing/types/linked_account_status.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class LinkedAccountStatus(pydantic_v1.BaseModel): + +class LinkedAccountStatus(UniversalBaseModel): linked_account_status: str can_make_request: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/meta_response.py b/src/merge/resources/ticketing/types/meta_response.py index debaf4ef..27e02126 100644 --- a/src/merge/resources/ticketing/types/meta_response.py +++ b/src/merge/resources/ticketing/types/meta_response.py @@ -1,34 +1,25 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .linked_account_status import LinkedAccountStatus -class MetaResponse(pydantic_v1.BaseModel): +class MetaResponse(UniversalBaseModel): request_schema: typing.Dict[str, typing.Any] remote_field_classes: typing.Optional[typing.Dict[str, typing.Any]] status: typing.Optional[LinkedAccountStatus] has_conditional_params: bool has_required_linked_account_params: bool - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/model_operation.py b/src/merge/resources/ticketing/types/model_operation.py index 0f4429ec..efe8355e 100644 --- 
a/src/merge/resources/ticketing/types/model_operation.py +++ b/src/merge/resources/ticketing/types/model_operation.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ModelOperation(pydantic_v1.BaseModel): + +class ModelOperation(UniversalBaseModel): """ # The ModelOperation Object @@ -25,20 +25,11 @@ class ModelOperation(pydantic_v1.BaseModel): required_post_parameters: typing.List[str] supported_fields: typing.List[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/model_permission_deserializer.py b/src/merge/resources/ticketing/types/model_permission_deserializer.py index 5a6adf20..14bc4f99 100644 --- a/src/merge/resources/ticketing/types/model_permission_deserializer.py +++ b/src/merge/resources/ticketing/types/model_permission_deserializer.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ModelPermissionDeserializer(pydantic_v1.BaseModel): - is_enabled: typing.Optional[bool] - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class ModelPermissionDeserializer(UniversalBaseModel): + is_enabled: typing.Optional[bool] - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/model_permission_deserializer_request.py b/src/merge/resources/ticketing/types/model_permission_deserializer_request.py index 3f72b9ac..cc2e7f77 100644 --- a/src/merge/resources/ticketing/types/model_permission_deserializer_request.py +++ b/src/merge/resources/ticketing/types/model_permission_deserializer_request.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class ModelPermissionDeserializerRequest(pydantic_v1.BaseModel): - is_enabled: typing.Optional[bool] - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +class ModelPermissionDeserializerRequest(UniversalBaseModel): + is_enabled: typing.Optional[bool] - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/multipart_form_field_request.py b/src/merge/resources/ticketing/types/multipart_form_field_request.py index 9c8ffb21..b6a6c708 100644 --- a/src/merge/resources/ticketing/types/multipart_form_field_request.py +++ b/src/merge/resources/ticketing/types/multipart_form_field_request.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .multipart_form_field_request_encoding import MultipartFormFieldRequestEncoding -class MultipartFormFieldRequest(pydantic_v1.BaseModel): +class MultipartFormFieldRequest(UniversalBaseModel): """ # The MultipartFormField Object @@ -21,17 +21,17 @@ class MultipartFormFieldRequest(pydantic_v1.BaseModel): Create a `MultipartFormField` to define a multipart form entry. """ - name: str = pydantic_v1.Field() + name: str = pydantic.Field() """ The name of the form field """ - data: str = pydantic_v1.Field() + data: str = pydantic.Field() """ The data for the form field. """ - encoding: typing.Optional[MultipartFormFieldRequestEncoding] = pydantic_v1.Field() + encoding: typing.Optional[MultipartFormFieldRequestEncoding] = pydantic.Field() """ The encoding of the value of `data`. Defaults to `RAW` if not defined. @@ -40,30 +40,21 @@ class MultipartFormFieldRequest(pydantic_v1.BaseModel): - `GZIP_BASE64` - GZIP_BASE64 """ - file_name: typing.Optional[str] = pydantic_v1.Field() + file_name: typing.Optional[str] = pydantic.Field() """ The file name of the form field, if the field is for a file. """ - content_type: typing.Optional[str] = pydantic_v1.Field() + content_type: typing.Optional[str] = pydantic.Field() """ The MIME type of the file, if the field is for a file. 
""" - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/paginated_account_details_and_actions_list.py b/src/merge/resources/ticketing/types/paginated_account_details_and_actions_list.py index 280100c4..07323330 100644 --- a/src/merge/resources/ticketing/types/paginated_account_details_and_actions_list.py +++ b/src/merge/resources/ticketing/types/paginated_account_details_and_actions_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account_details_and_actions import AccountDetailsAndActions -class PaginatedAccountDetailsAndActionsList(pydantic_v1.BaseModel): +class PaginatedAccountDetailsAndActionsList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[AccountDetailsAndActions]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/paginated_account_list.py b/src/merge/resources/ticketing/types/paginated_account_list.py index 21d2cda1..d9be284b 100644 --- a/src/merge/resources/ticketing/types/paginated_account_list.py +++ b/src/merge/resources/ticketing/types/paginated_account_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .account import Account -class PaginatedAccountList(pydantic_v1.BaseModel): +class PaginatedAccountList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Account]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/paginated_attachment_list.py b/src/merge/resources/ticketing/types/paginated_attachment_list.py index 52f28d1b..4e3d196b 100644 --- a/src/merge/resources/ticketing/types/paginated_attachment_list.py +++ b/src/merge/resources/ticketing/types/paginated_attachment_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .attachment import Attachment -class PaginatedAttachmentList(pydantic_v1.BaseModel): +class PaginatedAttachmentList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Attachment]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/paginated_audit_log_event_list.py b/src/merge/resources/ticketing/types/paginated_audit_log_event_list.py index 1d4154d2..e5e04fa7 100644 --- a/src/merge/resources/ticketing/types/paginated_audit_log_event_list.py +++ b/src/merge/resources/ticketing/types/paginated_audit_log_event_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .audit_log_event import AuditLogEvent -class PaginatedAuditLogEventList(pydantic_v1.BaseModel): +class PaginatedAuditLogEventList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[AuditLogEvent]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/paginated_collection_list.py b/src/merge/resources/ticketing/types/paginated_collection_list.py index 28f19e7a..fbb99b26 100644 --- a/src/merge/resources/ticketing/types/paginated_collection_list.py +++ b/src/merge/resources/ticketing/types/paginated_collection_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .collection import Collection -class PaginatedCollectionList(pydantic_v1.BaseModel): +class PaginatedCollectionList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Collection]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/paginated_comment_list.py b/src/merge/resources/ticketing/types/paginated_comment_list.py index 73662dfc..1af47c66 100644 --- a/src/merge/resources/ticketing/types/paginated_comment_list.py +++ b/src/merge/resources/ticketing/types/paginated_comment_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .comment import Comment -class PaginatedCommentList(pydantic_v1.BaseModel): +class PaginatedCommentList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Comment]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/paginated_contact_list.py b/src/merge/resources/ticketing/types/paginated_contact_list.py index dbe7c925..5e9c3fb7 100644 --- a/src/merge/resources/ticketing/types/paginated_contact_list.py +++ b/src/merge/resources/ticketing/types/paginated_contact_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .contact import Contact -class PaginatedContactList(pydantic_v1.BaseModel): +class PaginatedContactList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Contact]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/paginated_issue_list.py b/src/merge/resources/ticketing/types/paginated_issue_list.py index 1016e29a..da8437f1 100644 --- a/src/merge/resources/ticketing/types/paginated_issue_list.py +++ b/src/merge/resources/ticketing/types/paginated_issue_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .issue import Issue -class PaginatedIssueList(pydantic_v1.BaseModel): +class PaginatedIssueList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Issue]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/paginated_project_list.py b/src/merge/resources/ticketing/types/paginated_project_list.py index ee0990dd..148a8776 100644 --- a/src/merge/resources/ticketing/types/paginated_project_list.py +++ b/src/merge/resources/ticketing/types/paginated_project_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .project import Project -class PaginatedProjectList(pydantic_v1.BaseModel): +class PaginatedProjectList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Project]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/paginated_remote_field_class_list.py b/src/merge/resources/ticketing/types/paginated_remote_field_class_list.py index 9105daee..bf0a4fd5 100644 --- a/src/merge/resources/ticketing/types/paginated_remote_field_class_list.py +++ b/src/merge/resources/ticketing/types/paginated_remote_field_class_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_field_class import RemoteFieldClass -class PaginatedRemoteFieldClassList(pydantic_v1.BaseModel): +class PaginatedRemoteFieldClassList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[RemoteFieldClass]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/paginated_role_list.py b/src/merge/resources/ticketing/types/paginated_role_list.py index 162386fe..1bef4555 100644 --- a/src/merge/resources/ticketing/types/paginated_role_list.py +++ b/src/merge/resources/ticketing/types/paginated_role_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .role import Role -class PaginatedRoleList(pydantic_v1.BaseModel): +class PaginatedRoleList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Role]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/paginated_sync_status_list.py b/src/merge/resources/ticketing/types/paginated_sync_status_list.py index 6c88197e..7faca80c 100644 --- a/src/merge/resources/ticketing/types/paginated_sync_status_list.py +++ b/src/merge/resources/ticketing/types/paginated_sync_status_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .sync_status import SyncStatus -class PaginatedSyncStatusList(pydantic_v1.BaseModel): +class PaginatedSyncStatusList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[SyncStatus]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/paginated_tag_list.py b/src/merge/resources/ticketing/types/paginated_tag_list.py index a41f1d61..286bc9e1 100644 --- a/src/merge/resources/ticketing/types/paginated_tag_list.py +++ b/src/merge/resources/ticketing/types/paginated_tag_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .tag import Tag -class PaginatedTagList(pydantic_v1.BaseModel): +class PaginatedTagList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Tag]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/paginated_team_list.py b/src/merge/resources/ticketing/types/paginated_team_list.py index fdb9e669..5d227c70 100644 --- a/src/merge/resources/ticketing/types/paginated_team_list.py +++ b/src/merge/resources/ticketing/types/paginated_team_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .team import Team -class PaginatedTeamList(pydantic_v1.BaseModel): +class PaginatedTeamList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Team]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/paginated_ticket_list.py b/src/merge/resources/ticketing/types/paginated_ticket_list.py index 32ce26a9..2144ec72 100644 --- a/src/merge/resources/ticketing/types/paginated_ticket_list.py +++ b/src/merge/resources/ticketing/types/paginated_ticket_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .ticket import Ticket -class PaginatedTicketList(pydantic_v1.BaseModel): +class PaginatedTicketList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[Ticket]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/paginated_user_list.py b/src/merge/resources/ticketing/types/paginated_user_list.py index 6faaf352..d3e02f65 100644 --- a/src/merge/resources/ticketing/types/paginated_user_list.py +++ b/src/merge/resources/ticketing/types/paginated_user_list.py @@ -1,32 +1,23 @@ # This file was auto-generated by Fern from our API Definition. 
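With that base class in place, the regenerated models should behave the same on either pydantic major. A hedged usage sketch against PaginatedTicketList from the hunk above; the values are invented and only documented pydantic calls are used:

from merge.core.pydantic_utilities import IS_PYDANTIC_V2
from merge.resources.ticketing.types.paginated_ticket_list import PaginatedTicketList

# Hypothetical page of results; the cursor string is made up.
page = PaginatedTicketList(next="cD0yMDIx", previous=None, results=[])

# extra="allow" in both config branches keeps unrecognized keys from the API.
raw = {"next": None, "previous": None, "results": [], "unknown_key": 1}
if IS_PYDANTIC_V2:
    extra_page = PaginatedTicketList.model_validate(raw)  # v2 spelling
else:
    extra_page = PaginatedTicketList.parse_obj(raw)       # v1 spelling

# frozen=True on both branches: assigning to page.next would raise
# (TypeError on v1, ValidationError on v2) instead of mutating the model.

data = page.model_dump(by_alias=True) if IS_PYDANTIC_V2 else page.dict(by_alias=True)
print(data)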
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .user import User -class PaginatedUserList(pydantic_v1.BaseModel): +class PaginatedUserList(UniversalBaseModel): next: typing.Optional[str] previous: typing.Optional[str] results: typing.Optional[typing.List[User]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/patched_ticket_request.py b/src/merge/resources/ticketing/types/patched_ticket_request.py index 7dce49e3..05098bf2 100644 --- a/src/merge/resources/ticketing/types/patched_ticket_request.py +++ b/src/merge/resources/ticketing/types/patched_ticket_request.py @@ -3,14 +3,15 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .patched_ticket_request_priority import PatchedTicketRequestPriority from .patched_ticket_request_status import PatchedTicketRequestStatus from .remote_field_request import RemoteFieldRequest -class PatchedTicketRequest(pydantic_v1.BaseModel): +class PatchedTicketRequest(UniversalBaseModel): """ # The Ticket Object @@ -23,23 +24,23 @@ class PatchedTicketRequest(pydantic_v1.BaseModel): TODO """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The ticket's name. """ assignees: typing.Optional[typing.List[typing.Optional[str]]] - creator: typing.Optional[str] = pydantic_v1.Field() + creator: typing.Optional[str] = pydantic.Field() """ The user who created this ticket. """ - due_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + due_date: typing.Optional[dt.datetime] = pydantic.Field() """ The ticket's due date. """ - status: typing.Optional[PatchedTicketRequestStatus] = pydantic_v1.Field() + status: typing.Optional[PatchedTicketRequestStatus] = pydantic.Field() """ The current status of the ticket. @@ -49,44 +50,44 @@ class PatchedTicketRequest(pydantic_v1.BaseModel): - `ON_HOLD` - ON_HOLD """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The ticket’s description. HTML version of description is mapped if supported by the third-party platform. 
""" collections: typing.Optional[typing.List[typing.Optional[str]]] - ticket_type: typing.Optional[str] = pydantic_v1.Field() + ticket_type: typing.Optional[str] = pydantic.Field() """ The sub category of the ticket within the 3rd party system. Examples include incident, task, subtask or to-do. """ - account: typing.Optional[str] = pydantic_v1.Field() + account: typing.Optional[str] = pydantic.Field() """ The account associated with the ticket. """ - contact: typing.Optional[str] = pydantic_v1.Field() + contact: typing.Optional[str] = pydantic.Field() """ The contact associated with the ticket. """ - parent_ticket: typing.Optional[str] = pydantic_v1.Field() + parent_ticket: typing.Optional[str] = pydantic.Field() """ The ticket's parent ticket. """ tags: typing.Optional[typing.List[typing.Optional[str]]] - completed_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + completed_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the ticket was completed. """ - ticket_url: typing.Optional[str] = pydantic_v1.Field() + ticket_url: typing.Optional[str] = pydantic.Field() """ The 3rd party url of the Ticket. """ - priority: typing.Optional[PatchedTicketRequestPriority] = pydantic_v1.Field() + priority: typing.Optional[PatchedTicketRequestPriority] = pydantic.Field() """ The priority or urgency of the Ticket. @@ -100,20 +101,11 @@ class PatchedTicketRequest(pydantic_v1.BaseModel): linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] remote_fields: typing.Optional[typing.List[RemoteFieldRequest]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/project.py b/src/merge/resources/ticketing/types/project.py index d1744e87..8b9f1ab9 100644 --- a/src/merge/resources/ticketing/types/project.py +++ b/src/merge/resources/ticketing/types/project.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData -class Project(pydantic_v1.BaseModel): +class Project(UniversalBaseModel): """ # The Project Object @@ -22,32 +23,32 @@ class Project(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. 
""" - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The project's name. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The project's description. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -55,20 +56,11 @@ class Project(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/remote_data.py b/src/merge/resources/ticketing/types/remote_data.py index 098f551b..d50bfca2 100644 --- a/src/merge/resources/ticketing/types/remote_data.py +++ b/src/merge/resources/ticketing/types/remote_data.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class RemoteData(pydantic_v1.BaseModel): + +class RemoteData(UniversalBaseModel): path: str data: typing.Optional[typing.Any] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/remote_endpoint_info.py b/src/merge/resources/ticketing/types/remote_endpoint_info.py index da6037bc..9f627cae 100644 --- a/src/merge/resources/ticketing/types/remote_endpoint_info.py +++ b/src/merge/resources/ticketing/types/remote_endpoint_info.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class RemoteEndpointInfo(pydantic_v1.BaseModel): + +class RemoteEndpointInfo(UniversalBaseModel): method: str url_path: str field_traversal_path: typing.List[typing.Any] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/remote_field.py b/src/merge/resources/ticketing/types/remote_field.py index 6d504ed7..188ceb1d 100644 --- a/src/merge/resources/ticketing/types/remote_field.py +++ b/src/merge/resources/ticketing/types/remote_field.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_field_remote_field_class import RemoteFieldRemoteFieldClass -class RemoteField(pydantic_v1.BaseModel): +class RemoteField(UniversalBaseModel): remote_field_class: RemoteFieldRemoteFieldClass value: typing.Optional[typing.Any] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/remote_field_api.py b/src/merge/resources/ticketing/types/remote_field_api.py index c2a16698..1d1efb6a 100644 --- a/src/merge/resources/ticketing/types/remote_field_api.py +++ b/src/merge/resources/ticketing/types/remote_field_api.py @@ -1,39 +1,28 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .advanced_metadata import AdvancedMetadata from .remote_endpoint_info import RemoteEndpointInfo from .remote_field_api_coverage import RemoteFieldApiCoverage -class RemoteFieldApi(pydantic_v1.BaseModel): - schema_: typing.Dict[str, typing.Any] = pydantic_v1.Field(alias="schema") +class RemoteFieldApi(UniversalBaseModel): + schema_: typing.Dict[str, typing.Any] = pydantic.Field(alias="schema") remote_key_name: str remote_endpoint_info: RemoteEndpointInfo example_values: typing.List[typing.Any] advanced_metadata: typing.Optional[AdvancedMetadata] coverage: typing.Optional[RemoteFieldApiCoverage] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/remote_field_api_response.py b/src/merge/resources/ticketing/types/remote_field_api_response.py index 0b04afcf..995048ff 100644 --- a/src/merge/resources/ticketing/types/remote_field_api_response.py +++ b/src/merge/resources/ticketing/types/remote_field_api_response.py @@ -1,42 +1,31 @@ # This file was auto-generated by Fern from our API Definition. 
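One detail worth noting in the RemoteFieldApi hunk above: the attribute stays schema_ with pydantic.Field(alias="schema"), so the wire key "schema" does not collide with BaseModel.schema(). An illustration of parsing such a payload (the payload values are invented; the version branch mirrors the generated config):

from merge.core.pydantic_utilities import IS_PYDANTIC_V2
from merge.resources.ticketing.types.remote_field_api import RemoteFieldApi

payload = {
    "schema": {"type": "string"},          # wire name, populates schema_
    "remote_key_name": "custom_priority",  # hypothetical values
    "remote_endpoint_info": {
        "method": "GET",
        "url_path": "/issues",
        "field_traversal_path": ["fields", "priority"],
    },
    "example_values": ["P1", "P2"],
    "advanced_metadata": None,
    "coverage": None,
}

field_api = (
    RemoteFieldApi.model_validate(payload) if IS_PYDANTIC_V2 else RemoteFieldApi.parse_obj(payload)
)
assert field_api.schema_ == {"type": "string"}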
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .remote_field_api import RemoteFieldApi - - -class RemoteFieldApiResponse(pydantic_v1.BaseModel): - ticket: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Ticket") - comment: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Comment") - project: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Project") - collection: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Collection") - user: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="User") - role: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Role") - account: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Account") - team: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Team") - attachment: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Attachment") - tag: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Tag") - contact: typing.Optional[typing.List[RemoteFieldApi]] = pydantic_v1.Field(alias="Contact") +import pydantic - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel +from .remote_field_api import RemoteFieldApi - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - populate_by_name = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} +class RemoteFieldApiResponse(UniversalBaseModel): + ticket: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Ticket") + comment: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Comment") + project: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Project") + collection: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Collection") + user: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="User") + role: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Role") + account: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Account") + team: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Team") + attachment: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Attachment") + tag: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Tag") + contact: typing.Optional[typing.List[RemoteFieldApi]] = pydantic.Field(alias="Contact") + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git 
a/src/merge/resources/ticketing/types/remote_field_class.py b/src/merge/resources/ticketing/types/remote_field_class.py index d8f7a788..7dd62e6b 100644 --- a/src/merge/resources/ticketing/types/remote_field_class.py +++ b/src/merge/resources/ticketing/types/remote_field_class.py @@ -1,17 +1,17 @@ # This file was auto-generated by Fern from our API Definition. -import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .item_schema import ItemSchema from .remote_field_class_field_choices_item import RemoteFieldClassFieldChoicesItem from .remote_field_class_field_format import RemoteFieldClassFieldFormat from .remote_field_class_field_type import RemoteFieldClassFieldType -class RemoteFieldClass(pydantic_v1.BaseModel): +class RemoteFieldClass(UniversalBaseModel): id: typing.Optional[str] display_name: typing.Optional[str] remote_key_name: typing.Optional[str] @@ -23,20 +23,11 @@ class RemoteFieldClass(pydantic_v1.BaseModel): field_choices: typing.Optional[typing.List[RemoteFieldClassFieldChoicesItem]] item_schema: typing.Optional[ItemSchema] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/remote_field_class_field_choices_item.py b/src/merge/resources/ticketing/types/remote_field_class_field_choices_item.py index 19926fa0..4b011ac4 100644 --- a/src/merge/resources/ticketing/types/remote_field_class_field_choices_item.py +++ b/src/merge/resources/ticketing/types/remote_field_class_field_choices_item.py @@ -1,30 +1,21 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class RemoteFieldClassFieldChoicesItem(pydantic_v1.BaseModel): + +class RemoteFieldClassFieldChoicesItem(UniversalBaseModel): value: typing.Optional[typing.Any] display_name: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/remote_field_request.py b/src/merge/resources/ticketing/types/remote_field_request.py index 84d08873..46af4bfc 100644 --- a/src/merge/resources/ticketing/types/remote_field_request.py +++ b/src/merge/resources/ticketing/types/remote_field_request.py @@ -1,31 +1,22 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_field_request_remote_field_class import RemoteFieldRequestRemoteFieldClass -class RemoteFieldRequest(pydantic_v1.BaseModel): +class RemoteFieldRequest(UniversalBaseModel): remote_field_class: RemoteFieldRequestRemoteFieldClass value: typing.Optional[str] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/remote_key.py b/src/merge/resources/ticketing/types/remote_key.py index e0bec368..0ce7d620 100644 --- a/src/merge/resources/ticketing/types/remote_key.py +++ b/src/merge/resources/ticketing/types/remote_key.py @@ -1,13 +1,13 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel -class RemoteKey(pydantic_v1.BaseModel): + +class RemoteKey(UniversalBaseModel): """ # The RemoteKey Object @@ -23,20 +23,11 @@ class RemoteKey(pydantic_v1.BaseModel): name: str key: str - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/remote_response.py b/src/merge/resources/ticketing/types/remote_response.py index f39951ee..5551bafc 100644 --- a/src/merge/resources/ticketing/types/remote_response.py +++ b/src/merge/resources/ticketing/types/remote_response.py @@ -1,14 +1,14 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .response_type_enum import ResponseTypeEnum -class RemoteResponse(pydantic_v1.BaseModel): +class RemoteResponse(UniversalBaseModel): """ # The RemoteResponse Object @@ -29,20 +29,11 @@ class RemoteResponse(pydantic_v1.BaseModel): response_type: typing.Optional[ResponseTypeEnum] headers: typing.Optional[typing.Dict[str, typing.Any]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/role.py b/src/merge/resources/ticketing/types/role.py index acc9822f..95fba772 100644 --- a/src/merge/resources/ticketing/types/role.py +++ b/src/merge/resources/ticketing/types/role.py @@ -3,14 +3,15 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData from .role_ticket_access import RoleTicketAccess from .role_ticket_actions_item import RoleTicketActionsItem -class Role(pydantic_v1.BaseModel): +class Role(UniversalBaseModel): """ # The Role Object @@ -24,32 +25,32 @@ class Role(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The name of the Role. """ - ticket_actions: typing.Optional[typing.List[typing.Optional[RoleTicketActionsItem]]] = pydantic_v1.Field() + ticket_actions: typing.Optional[typing.List[typing.Optional[RoleTicketActionsItem]]] = pydantic.Field() """ The set of actions that a User with this Role can perform. Possible enum values include: `VIEW`, `CREATE`, `EDIT`, `DELETE`, `CLOSE`, and `ASSIGN`. 
""" - ticket_access: typing.Optional[RoleTicketAccess] = pydantic_v1.Field() + ticket_access: typing.Optional[RoleTicketAccess] = pydantic.Field() """ The level of Ticket access that a User with this Role can perform. @@ -58,7 +59,7 @@ class Role(pydantic_v1.BaseModel): - `TEAM_ONLY` - TEAM_ONLY """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -66,20 +67,11 @@ class Role(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/sync_status.py b/src/merge/resources/ticketing/types/sync_status.py index c6b7cbc4..03668cbf 100644 --- a/src/merge/resources/ticketing/types/sync_status.py +++ b/src/merge/resources/ticketing/types/sync_status.py @@ -3,13 +3,14 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .selective_sync_configurations_usage_enum import SelectiveSyncConfigurationsUsageEnum from .sync_status_status_enum import SyncStatusStatusEnum -class SyncStatus(pydantic_v1.BaseModel): +class SyncStatus(UniversalBaseModel): """ # The SyncStatus Object @@ -30,20 +31,11 @@ class SyncStatus(pydantic_v1.BaseModel): is_initial_sync: bool selective_sync_configurations_usage: typing.Optional[SelectiveSyncConfigurationsUsageEnum] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: 
serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/tag.py b/src/merge/resources/ticketing/types/tag.py index da5d8d13..be1eafaf 100644 --- a/src/merge/resources/ticketing/types/tag.py +++ b/src/merge/resources/ticketing/types/tag.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData -class Tag(pydantic_v1.BaseModel): +class Tag(UniversalBaseModel): """ # The Tag Object @@ -21,27 +22,27 @@ class Tag(pydantic_v1.BaseModel): TODO """ - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The tag's name. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -49,20 +50,11 @@ class Tag(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/team.py b/src/merge/resources/ticketing/types/team.py index dfe0542e..f5fb3acb 100644 --- a/src/merge/resources/ticketing/types/team.py +++ b/src/merge/resources/ticketing/types/team.py @@ -3,12 +3,13 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_data import RemoteData -class Team(pydantic_v1.BaseModel): +class Team(UniversalBaseModel): """ # The Team Object @@ -22,32 +23,32 @@ class Team(pydantic_v1.BaseModel): 
""" id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. """ - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The team's name. """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The team's description. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -55,20 +56,11 @@ class Team(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/ticket.py b/src/merge/resources/ticketing/types/ticket.py index 110d25bd..e3ac0f17 100644 --- a/src/merge/resources/ticketing/types/ticket.py +++ b/src/merge/resources/ticketing/types/ticket.py @@ -5,8 +5,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel, update_forward_refs from .remote_data import RemoteData from .remote_field import RemoteField from .ticket_account import TicketAccount @@ -18,7 +19,7 @@ from .ticket_status import TicketStatus -class Ticket(pydantic_v1.BaseModel): +class Ticket(UniversalBaseModel): """ # The Ticket Object @@ -32,38 +33,38 @@ class Ticket(pydantic_v1.BaseModel): """ id: typing.Optional[str] - remote_id: typing.Optional[str] = pydantic_v1.Field() + remote_id: typing.Optional[str] = pydantic.Field() """ The third-party API ID of the matching object. """ - created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + created_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was created by Merge. 
""" - modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + modified_at: typing.Optional[dt.datetime] = pydantic.Field() """ The datetime that this object was modified by Merge. """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The ticket's name. """ assignees: typing.Optional[typing.List[typing.Optional[TicketAssigneesItem]]] - creator: typing.Optional[TicketCreator] = pydantic_v1.Field() + creator: typing.Optional[TicketCreator] = pydantic.Field() """ The user who created this ticket. """ - due_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + due_date: typing.Optional[dt.datetime] = pydantic.Field() """ The ticket's due date. """ - status: typing.Optional[TicketStatus] = pydantic_v1.Field() + status: typing.Optional[TicketStatus] = pydantic.Field() """ The current status of the ticket. @@ -73,56 +74,56 @@ class Ticket(pydantic_v1.BaseModel): - `ON_HOLD` - ON_HOLD """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The ticket’s description. HTML version of description is mapped if supported by the third-party platform. """ collections: typing.Optional[typing.List[typing.Optional[TicketCollectionsItem]]] - ticket_type: typing.Optional[str] = pydantic_v1.Field() + ticket_type: typing.Optional[str] = pydantic.Field() """ The sub category of the ticket within the 3rd party system. Examples include incident, task, subtask or to-do. """ - account: typing.Optional[TicketAccount] = pydantic_v1.Field() + account: typing.Optional[TicketAccount] = pydantic.Field() """ The account associated with the ticket. """ - contact: typing.Optional[TicketContact] = pydantic_v1.Field() + contact: typing.Optional[TicketContact] = pydantic.Field() """ The contact associated with the ticket. """ - parent_ticket: typing.Optional[TicketParentTicket] = pydantic_v1.Field() + parent_ticket: typing.Optional[TicketParentTicket] = pydantic.Field() """ The ticket's parent ticket. """ attachments: typing.Optional[typing.List[typing.Optional[TicketAttachmentsItem]]] tags: typing.Optional[typing.List[typing.Optional[str]]] - remote_created_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_created_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's ticket was created. """ - remote_updated_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + remote_updated_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the third party's ticket was updated. """ - completed_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + completed_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the ticket was completed. """ remote_was_deleted: typing.Optional[bool] - ticket_url: typing.Optional[str] = pydantic_v1.Field() + ticket_url: typing.Optional[str] = pydantic.Field() """ The 3rd party url of the Ticket. """ - priority: typing.Optional[TicketPriority] = pydantic_v1.Field() + priority: typing.Optional[TicketPriority] = pydantic.Field() """ The priority or urgency of the Ticket. 
@@ -136,26 +137,17 @@ class Ticket(pydantic_v1.BaseModel): remote_data: typing.Optional[typing.List[RemoteData]] remote_fields: typing.Optional[typing.List[RemoteField]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow from .ticket_attachments_item import TicketAttachmentsItem # noqa: E402 from .ticket_parent_ticket import TicketParentTicket # noqa: E402 -Ticket.update_forward_refs() +update_forward_refs(Ticket) diff --git a/src/merge/resources/ticketing/types/ticket_request.py b/src/merge/resources/ticketing/types/ticket_request.py index d4b59f97..46d5b8c8 100644 --- a/src/merge/resources/ticketing/types/ticket_request.py +++ b/src/merge/resources/ticketing/types/ticket_request.py @@ -3,8 +3,9 @@ import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .remote_field_request import RemoteFieldRequest from .ticket_request_account import TicketRequestAccount from .ticket_request_assignees_item import TicketRequestAssigneesItem @@ -17,7 +18,7 @@ from .ticket_request_status import TicketRequestStatus -class TicketRequest(pydantic_v1.BaseModel): +class TicketRequest(UniversalBaseModel): """ # The Ticket Object @@ -30,23 +31,23 @@ class TicketRequest(pydantic_v1.BaseModel): TODO """ - name: typing.Optional[str] = pydantic_v1.Field() + name: typing.Optional[str] = pydantic.Field() """ The ticket's name. """ assignees: typing.Optional[typing.List[typing.Optional[TicketRequestAssigneesItem]]] - creator: typing.Optional[TicketRequestCreator] = pydantic_v1.Field() + creator: typing.Optional[TicketRequestCreator] = pydantic.Field() """ The user who created this ticket. """ - due_date: typing.Optional[dt.datetime] = pydantic_v1.Field() + due_date: typing.Optional[dt.datetime] = pydantic.Field() """ The ticket's due date. """ - status: typing.Optional[TicketRequestStatus] = pydantic_v1.Field() + status: typing.Optional[TicketRequestStatus] = pydantic.Field() """ The current status of the ticket. @@ -56,45 +57,45 @@ class TicketRequest(pydantic_v1.BaseModel): - `ON_HOLD` - ON_HOLD """ - description: typing.Optional[str] = pydantic_v1.Field() + description: typing.Optional[str] = pydantic.Field() """ The ticket’s description. HTML version of description is mapped if supported by the third-party platform. 
""" collections: typing.Optional[typing.List[typing.Optional[TicketRequestCollectionsItem]]] - ticket_type: typing.Optional[str] = pydantic_v1.Field() + ticket_type: typing.Optional[str] = pydantic.Field() """ The sub category of the ticket within the 3rd party system. Examples include incident, task, subtask or to-do. """ - account: typing.Optional[TicketRequestAccount] = pydantic_v1.Field() + account: typing.Optional[TicketRequestAccount] = pydantic.Field() """ The account associated with the ticket. """ - contact: typing.Optional[TicketRequestContact] = pydantic_v1.Field() + contact: typing.Optional[TicketRequestContact] = pydantic.Field() """ The contact associated with the ticket. """ - parent_ticket: typing.Optional[TicketRequestParentTicket] = pydantic_v1.Field() + parent_ticket: typing.Optional[TicketRequestParentTicket] = pydantic.Field() """ The ticket's parent ticket. """ attachments: typing.Optional[typing.List[typing.Optional[TicketRequestAttachmentsItem]]] tags: typing.Optional[typing.List[typing.Optional[str]]] - completed_at: typing.Optional[dt.datetime] = pydantic_v1.Field() + completed_at: typing.Optional[dt.datetime] = pydantic.Field() """ When the ticket was completed. """ - ticket_url: typing.Optional[str] = pydantic_v1.Field() + ticket_url: typing.Optional[str] = pydantic.Field() """ The 3rd party url of the Ticket. """ - priority: typing.Optional[TicketRequestPriority] = pydantic_v1.Field() + priority: typing.Optional[TicketRequestPriority] = pydantic.Field() """ The priority or urgency of the Ticket. @@ -108,20 +109,11 @@ class TicketRequest(pydantic_v1.BaseModel): linked_account_params: typing.Optional[typing.Dict[str, typing.Any]] remote_fields: typing.Optional[typing.List[RemoteFieldRequest]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/ticket_response.py b/src/merge/resources/ticketing/types/ticket_response.py index ad1ea72e..ca39d5f0 100644 --- a/src/merge/resources/ticketing/types/ticket_response.py +++ b/src/merge/resources/ticketing/types/ticket_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .ticket import Ticket from .warning_validation_problem import WarningValidationProblem -class TicketResponse(pydantic_v1.BaseModel): +class TicketResponse(UniversalBaseModel): model: Ticket warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/ticketing_attachment_response.py b/src/merge/resources/ticketing/types/ticketing_attachment_response.py index cdf1d3e1..10345490 100644 --- a/src/merge/resources/ticketing/types/ticketing_attachment_response.py +++ b/src/merge/resources/ticketing/types/ticketing_attachment_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
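The *Response envelopes regenerated here (TicketResponse above, the attachment and contact variants below) keep the same shape: the written model plus warnings, errors, and optional debug logs. A small helper for inspecting one, using only the fields visible in these hunks:

from merge.resources.ticketing.types.ticket_response import TicketResponse


def log_problems(response: TicketResponse) -> None:
    # Every write envelope carries the written model plus validation
    # problems and, when debug mode is on, request/response logs.
    for problem in response.warnings:
        print("warning:", problem)
    for problem in response.errors:
        print("error:", problem)
    for log in response.logs or []:
        print("debug log:", log)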
-import datetime as dt import typing -from ....core.datetime_utils import serialize_datetime -from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +import pydantic + +from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from .attachment import Attachment from .debug_mode_log import DebugModeLog from .error_validation_problem import ErrorValidationProblem from .warning_validation_problem import WarningValidationProblem -class TicketingAttachmentResponse(pydantic_v1.BaseModel): +class TicketingAttachmentResponse(UniversalBaseModel): model: Attachment warnings: typing.List[WarningValidationProblem] errors: typing.List[ErrorValidationProblem] logs: typing.Optional[typing.List[DebugModeLog]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/ticketing_contact_response.py b/src/merge/resources/ticketing/types/ticketing_contact_response.py index b04f143b..a3c56c7c 100644 --- a/src/merge/resources/ticketing/types/ticketing_contact_response.py +++ b/src/merge/resources/ticketing/types/ticketing_contact_response.py @@ -1,36 +1,27 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt
 import typing
 
-from ....core.datetime_utils import serialize_datetime
-from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
+import pydantic
+
+from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
 from .contact import Contact
 from .debug_mode_log import DebugModeLog
 from .error_validation_problem import ErrorValidationProblem
 from .warning_validation_problem import WarningValidationProblem
 
 
-class TicketingContactResponse(pydantic_v1.BaseModel):
+class TicketingContactResponse(UniversalBaseModel):
     model: Contact
     warnings: typing.List[WarningValidationProblem]
     errors: typing.List[ErrorValidationProblem]
     logs: typing.Optional[typing.List[DebugModeLog]]
 
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
-
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
 
-    class Config:
-        frozen = True
-        smart_union = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/merge/resources/ticketing/types/user.py b/src/merge/resources/ticketing/types/user.py
index ed7ae3da..c1490b84 100644
--- a/src/merge/resources/ticketing/types/user.py
+++ b/src/merge/resources/ticketing/types/user.py
@@ -3,14 +3,15 @@
 import datetime as dt
 import typing
 
-from ....core.datetime_utils import serialize_datetime
-from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
+import pydantic
+
+from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
 from .remote_data import RemoteData
 from .user_roles_item import UserRolesItem
 from .user_teams_item import UserTeamsItem
 
 
-class User(pydantic_v1.BaseModel):
+class User(UniversalBaseModel):
     """
     # The User Object
 
@@ -24,44 +25,44 @@ class User(pydantic_v1.BaseModel):
     """
 
     id: typing.Optional[str]
-    remote_id: typing.Optional[str] = pydantic_v1.Field()
+    remote_id: typing.Optional[str] = pydantic.Field()
     """
     The third-party API ID of the matching object.
     """
 
-    created_at: typing.Optional[dt.datetime] = pydantic_v1.Field()
+    created_at: typing.Optional[dt.datetime] = pydantic.Field()
     """
     The datetime that this object was created by Merge.
     """
 
-    modified_at: typing.Optional[dt.datetime] = pydantic_v1.Field()
+    modified_at: typing.Optional[dt.datetime] = pydantic.Field()
     """
     The datetime that this object was modified by Merge.
     """
 
-    name: typing.Optional[str] = pydantic_v1.Field()
+    name: typing.Optional[str] = pydantic.Field()
     """
     The user's name.
     """
 
-    email_address: typing.Optional[str] = pydantic_v1.Field()
+    email_address: typing.Optional[str] = pydantic.Field()
     """
     The user's email address.
     """
 
-    is_active: typing.Optional[bool] = pydantic_v1.Field()
+    is_active: typing.Optional[bool] = pydantic.Field()
     """
     Whether or not the user is active.
""" teams: typing.Optional[typing.List[typing.Optional[UserTeamsItem]]] roles: typing.Optional[typing.List[typing.Optional[UserRolesItem]]] - avatar: typing.Optional[str] = pydantic_v1.Field() + avatar: typing.Optional[str] = pydantic.Field() """ The user's avatar picture. """ - remote_was_deleted: typing.Optional[bool] = pydantic_v1.Field() + remote_was_deleted: typing.Optional[bool] = pydantic.Field() """ Indicates whether or not this object has been deleted in the third party platform. """ @@ -69,20 +70,11 @@ class User(pydantic_v1.BaseModel): field_mappings: typing.Optional[typing.Dict[str, typing.Any]] remote_data: typing.Optional[typing.List[RemoteData]] - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs} - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none) - ) + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/src/merge/resources/ticketing/types/validation_problem_source.py b/src/merge/resources/ticketing/types/validation_problem_source.py index fde15b40..c65d82ef 100644 --- a/src/merge/resources/ticketing/types/validation_problem_source.py +++ b/src/merge/resources/ticketing/types/validation_problem_source.py @@ -1,29 +1,20 @@ # This file was auto-generated by Fern from our API Definition. 
-import datetime as dt
 import typing
 
-from ....core.datetime_utils import serialize_datetime
-from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
+import pydantic
 
+from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
 
-class ValidationProblemSource(pydantic_v1.BaseModel):
-    pointer: str
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
 
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+class ValidationProblemSource(UniversalBaseModel):
+    pointer: str
 
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
 
-    class Config:
-        frozen = True
-        smart_union = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/merge/resources/ticketing/types/warning_validation_problem.py b/src/merge/resources/ticketing/types/warning_validation_problem.py
index 6baf9600..348d668a 100644
--- a/src/merge/resources/ticketing/types/warning_validation_problem.py
+++ b/src/merge/resources/ticketing/types/warning_validation_problem.py
@@ -1,33 +1,24 @@
 # This file was auto-generated by Fern from our API Definition.
 
-import datetime as dt
 import typing
 
-from ....core.datetime_utils import serialize_datetime
-from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
+import pydantic
+
+from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
 from .validation_problem_source import ValidationProblemSource
 
 
-class WarningValidationProblem(pydantic_v1.BaseModel):
+class WarningValidationProblem(UniversalBaseModel):
     source: typing.Optional[ValidationProblemSource]
     title: str
     detail: str
     problem_type: str
 
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
-
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
 
-    class Config:
-        frozen = True
-        smart_union = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/merge/resources/ticketing/types/webhook_receiver.py b/src/merge/resources/ticketing/types/webhook_receiver.py
index 0544f256..bb10af95 100644
--- a/src/merge/resources/ticketing/types/webhook_receiver.py
+++ b/src/merge/resources/ticketing/types/webhook_receiver.py
@@ -1,31 +1,22 @@
 # This file was auto-generated by Fern from our API Definition.
 
-import datetime as dt
 import typing
 
-from ....core.datetime_utils import serialize_datetime
-from ....core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
+import pydantic
 
+from ....core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
 
-class WebhookReceiver(pydantic_v1.BaseModel):
+
+class WebhookReceiver(UniversalBaseModel):
     event: str
     is_active: bool
     key: typing.Optional[str]
 
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
-
-        return deep_union_pydantic_dicts(
-            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
-        )
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
 
-    class Config:
-        frozen = True
-        smart_union = True
-        extra = pydantic_v1.Extra.allow
-        json_encoders = {dt.datetime: serialize_datetime}
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/tests/utils/test_http_client.py b/tests/utils/test_http_client.py
new file mode 100644
index 00000000..517b52b5
--- /dev/null
+++ b/tests/utils/test_http_client.py
@@ -0,0 +1,47 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from merge.core.http_client import get_request_body
+from merge.core.request_options import RequestOptions
+
+
+def get_request_options() -> RequestOptions:
+    return {"additional_body_parameters": {"see you": "later"}}
+
+
+def test_get_json_request_body() -> None:
+    json_body, data_body = get_request_body(json={"hello": "world"}, data=None, request_options=None, omit=None)
+    assert json_body == {"hello": "world"}
+    assert data_body is None
+
+    json_body_extras, data_body_extras = get_request_body(
+        json={"goodbye": "world"}, data=None, request_options=get_request_options(), omit=None
+    )
+
+    assert json_body_extras == {"goodbye": "world", "see you": "later"}
+    assert data_body_extras is None
+
+
+def test_get_files_request_body() -> None:
+    json_body, data_body = get_request_body(json=None, data={"hello": "world"}, request_options=None, omit=None)
+    assert data_body == {"hello": "world"}
+    assert json_body is None
+
+    json_body_extras, data_body_extras = get_request_body(
+        json=None, data={"goodbye": "world"}, request_options=get_request_options(), omit=None
+    )
+
+    assert data_body_extras == {"goodbye": "world", "see you": "later"}
+    assert json_body_extras is None
+
+
+def test_get_none_request_body() -> None:
+    json_body, data_body = get_request_body(json=None, data=None, request_options=None, omit=None)
+    assert data_body is None
+    assert json_body is None
+
+    json_body_extras, data_body_extras = get_request_body(
+        json=None, data=None, request_options=get_request_options(), omit=None
+    )
+
+    assert json_body_extras == {"see you": "later"}
+    assert data_body_extras is None
diff --git a/tests/utils/test_query_encoding.py b/tests/utils/test_query_encoding.py
new file mode 100644
index 00000000..43cab010
--- /dev/null
+++ b/tests/utils/test_query_encoding.py
@@ -0,0 +1,13 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from merge.core.query_encoder import encode_query
+
+
+def test_query_encoding() -> None:
+    assert encode_query({"hello world": "hello world"}) == {"hello world": "hello world"}
+    assert encode_query({"hello_world": {"hello": "world"}}) == {"hello_world[hello]": "world"}
+    assert encode_query({"hello_world": {"hello": {"world": "today"}, "test": "this"}, "hi": "there"}) == {
+        "hello_world[hello][world]": "today",
+        "hello_world[test]": "this",
+        "hi": "there",
+    }

From 4c7774730cb1d6e2320ae25bb93a3c77463f33d6 Mon Sep 17 00:00:00 2001
From: fern-api <115122769+fern-api[bot]@users.noreply.github.com>
Date: Wed, 24 Jul 2024 07:59:15 -0400
Subject: [PATCH 2/2] update base model

---
 src/merge/core/pydantic_utilities.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/merge/core/pydantic_utilities.py b/src/merge/core/pydantic_utilities.py
index 0f24b0ea..7c5418b5 100644
--- a/src/merge/core/pydantic_utilities.py
+++ b/src/merge/core/pydantic_utilities.py
@@ -138,7 +138,7 @@ def encode_by_type(o: typing.Any) -> typing.Any:
 
 def update_forward_refs(model: typing.Type["Model"], **localns: typing.Any) -> None:
     if IS_PYDANTIC_V2:
-        model.model_rebuild(force=True)  # type: ignore # Pydantic v2
+        model.model_rebuild(force=True, raise_errors=False)  # type: ignore # Pydantic v2
     else:
         model.update_forward_refs(**localns)
 
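Note (editorial, not part of either patch): the regenerated types above all switch to a version-branching configuration so one model definition stays frozen and keeps unknown fields on both Pydantic majors. Below is a minimal standalone sketch of that pattern under stated assumptions; `ExampleModel`, the local `IS_PYDANTIC_V2` flag, and the usage lines are illustrative only and are not Merge SDK code.

# Hypothetical sketch: one model that is frozen and allows extra fields on
# both Pydantic v1 and v2. Only the if/else config shape mirrors the
# generated models in the patches above; ExampleModel itself is made up.
import typing

import pydantic

IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")  # same idea as the SDK's flag


class ExampleModel(pydantic.BaseModel):
    name: typing.Optional[str] = None

    if IS_PYDANTIC_V2:
        # Pydantic v2 is configured through model_config / ConfigDict.
        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore
    else:
        # Pydantic v1 expresses the same behaviour with an inner Config class.
        class Config:
            frozen = True
            smart_union = True
            extra = pydantic.Extra.allow


if __name__ == "__main__":
    m = ExampleModel(name="hello", extra_field="kept")  # extra="allow" retains unknown keys
    print(m.model_dump() if IS_PYDANTIC_V2 else m.dict())

Because the branch is evaluated at class-creation time, only one of the two configurations ever executes, which is why the generated files can drop the hand-written json()/dict() overrides and the datetime json_encoders entry.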