diff --git a/.stats.yml b/.stats.yml
index 7662c60d..8bdf2ea0 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1 +1 @@
-configured_endpoints: 110
+configured_endpoints: 112
diff --git a/api.md b/api.md
index 49fc4d95..21359f56 100644
--- a/api.md
+++ b/api.md
@@ -114,11 +114,13 @@ Methods:
Types:
```python
-from lithic.types import Tokenization, TokenizationSimulateResponse
+from lithic.types import Tokenization, TokenizationRetrieveResponse, TokenizationSimulateResponse
```
Methods:
+- client.tokenizations.retrieve(tokenization_token) -> TokenizationRetrieveResponse
+- client.tokenizations.list(\*\*params) -> SyncCursorPage[Tokenization]
- client.tokenizations.simulate(\*\*params) -> TokenizationSimulateResponse
# Cards
diff --git a/src/lithic/resources/tokenizations.py b/src/lithic/resources/tokenizations.py
index dda14b9a..2aec5963 100644
--- a/src/lithic/resources/tokenizations.py
+++ b/src/lithic/resources/tokenizations.py
@@ -2,18 +2,28 @@
from __future__ import annotations
+from typing import Union
+from datetime import date
from typing_extensions import Literal
import httpx
from .. import _legacy_response
-from ..types import TokenizationSimulateResponse, tokenization_simulate_params
+from ..types import (
+ Tokenization,
+ TokenizationRetrieveResponse,
+ TokenizationSimulateResponse,
+ tokenization_list_params,
+ tokenization_simulate_params,
+)
from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
from .._utils import maybe_transform
from .._compat import cached_property
from .._resource import SyncAPIResource, AsyncAPIResource
from .._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper
+from ..pagination import SyncCursorPage, AsyncCursorPage
from .._base_client import (
+ AsyncPaginator,
make_request_options,
)
@@ -29,6 +39,108 @@ def with_raw_response(self) -> TokenizationsWithRawResponse:
def with_streaming_response(self) -> TokenizationsWithStreamingResponse:
return TokenizationsWithStreamingResponse(self)
+ def retrieve(
+ self,
+ tokenization_token: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> TokenizationRetrieveResponse:
+ """
+ Get tokenization
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not tokenization_token:
+ raise ValueError(f"Expected a non-empty value for `tokenization_token` but received {tokenization_token!r}")
+ return self._get(
+ f"/tokenizations/{tokenization_token}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=TokenizationRetrieveResponse,
+ )
+
+ def list(
+ self,
+ *,
+ account_token: str | NotGiven = NOT_GIVEN,
+ begin: Union[str, date] | NotGiven = NOT_GIVEN,
+ card_token: str | NotGiven = NOT_GIVEN,
+ end: Union[str, date] | NotGiven = NOT_GIVEN,
+ ending_before: str | NotGiven = NOT_GIVEN,
+ page_size: int | NotGiven = NOT_GIVEN,
+ starting_after: str | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> SyncCursorPage[Tokenization]:
+ """
+ List card tokenizations
+
+ Args:
+ account_token: Filters for tokenizations associated with a specific account.
+
+ begin: Filter for tokenizations created after this date.
+
+ card_token: Filters for tokenizations associated with a specific card.
+
+ end: Filter for tokenizations created before this date.
+
+ ending_before: A cursor representing an item's token before which a page of results should end.
+ Used to retrieve the previous page of results before this item.
+
+ page_size: Page size (for pagination).
+
+ starting_after: A cursor representing an item's token after which a page of results should
+ begin. Used to retrieve the next page of results after this item.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get_api_list(
+ "/tokenizations",
+ page=SyncCursorPage[Tokenization],
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=maybe_transform(
+ {
+ "account_token": account_token,
+ "begin": begin,
+ "card_token": card_token,
+ "end": end,
+ "ending_before": ending_before,
+ "page_size": page_size,
+ "starting_after": starting_after,
+ },
+ tokenization_list_params.TokenizationListParams,
+ ),
+ ),
+ model=Tokenization,
+ )
+
def simulate(
self,
*,
@@ -106,6 +218,108 @@ def with_raw_response(self) -> AsyncTokenizationsWithRawResponse:
def with_streaming_response(self) -> AsyncTokenizationsWithStreamingResponse:
return AsyncTokenizationsWithStreamingResponse(self)
+ async def retrieve(
+ self,
+ tokenization_token: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> TokenizationRetrieveResponse:
+ """
+ Get tokenization
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not tokenization_token:
+ raise ValueError(f"Expected a non-empty value for `tokenization_token` but received {tokenization_token!r}")
+ return await self._get(
+ f"/tokenizations/{tokenization_token}",
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=TokenizationRetrieveResponse,
+ )
+
+ def list(
+ self,
+ *,
+ account_token: str | NotGiven = NOT_GIVEN,
+ begin: Union[str, date] | NotGiven = NOT_GIVEN,
+ card_token: str | NotGiven = NOT_GIVEN,
+ end: Union[str, date] | NotGiven = NOT_GIVEN,
+ ending_before: str | NotGiven = NOT_GIVEN,
+ page_size: int | NotGiven = NOT_GIVEN,
+ starting_after: str | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> AsyncPaginator[Tokenization, AsyncCursorPage[Tokenization]]:
+ """
+ List card tokenizations
+
+ Args:
+ account_token: Filters for tokenizations associated with a specific account.
+
+ begin: Filter for tokenizations created after this date.
+
+ card_token: Filters for tokenizations associated with a specific card.
+
+ end: Filter for tokenizations created before this date.
+
+ ending_before: A cursor representing an item's token before which a page of results should end.
+ Used to retrieve the previous page of results before this item.
+
+ page_size: Page size (for pagination).
+
+ starting_after: A cursor representing an item's token after which a page of results should
+ begin. Used to retrieve the next page of results after this item.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get_api_list(
+ "/tokenizations",
+ page=AsyncCursorPage[Tokenization],
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=maybe_transform(
+ {
+ "account_token": account_token,
+ "begin": begin,
+ "card_token": card_token,
+ "end": end,
+ "ending_before": ending_before,
+ "page_size": page_size,
+ "starting_after": starting_after,
+ },
+ tokenization_list_params.TokenizationListParams,
+ ),
+ ),
+ model=Tokenization,
+ )
+
async def simulate(
self,
*,
@@ -178,6 +392,12 @@ class TokenizationsWithRawResponse:
def __init__(self, tokenizations: Tokenizations) -> None:
self._tokenizations = tokenizations
+ self.retrieve = _legacy_response.to_raw_response_wrapper(
+ tokenizations.retrieve,
+ )
+ self.list = _legacy_response.to_raw_response_wrapper(
+ tokenizations.list,
+ )
self.simulate = _legacy_response.to_raw_response_wrapper(
tokenizations.simulate,
)
@@ -187,6 +407,12 @@ class AsyncTokenizationsWithRawResponse:
def __init__(self, tokenizations: AsyncTokenizations) -> None:
self._tokenizations = tokenizations
+ self.retrieve = _legacy_response.async_to_raw_response_wrapper(
+ tokenizations.retrieve,
+ )
+ self.list = _legacy_response.async_to_raw_response_wrapper(
+ tokenizations.list,
+ )
self.simulate = _legacy_response.async_to_raw_response_wrapper(
tokenizations.simulate,
)
@@ -196,6 +422,12 @@ class TokenizationsWithStreamingResponse:
def __init__(self, tokenizations: Tokenizations) -> None:
self._tokenizations = tokenizations
+ self.retrieve = to_streamed_response_wrapper(
+ tokenizations.retrieve,
+ )
+ self.list = to_streamed_response_wrapper(
+ tokenizations.list,
+ )
self.simulate = to_streamed_response_wrapper(
tokenizations.simulate,
)
@@ -205,6 +437,12 @@ class AsyncTokenizationsWithStreamingResponse:
def __init__(self, tokenizations: AsyncTokenizations) -> None:
self._tokenizations = tokenizations
+ self.retrieve = async_to_streamed_response_wrapper(
+ tokenizations.retrieve,
+ )
+ self.list = async_to_streamed_response_wrapper(
+ tokenizations.list,
+ )
self.simulate = async_to_streamed_response_wrapper(
tokenizations.simulate,
)
diff --git a/src/lithic/types/__init__.py b/src/lithic/types/__init__.py
index 28712475..bc1b39f6 100644
--- a/src/lithic/types/__init__.py
+++ b/src/lithic/types/__init__.py
@@ -61,6 +61,7 @@
from .payment_create_response import PaymentCreateResponse as PaymentCreateResponse
from .transaction_list_params import TransactionListParams as TransactionListParams
from .card_program_list_params import CardProgramListParams as CardProgramListParams
+from .tokenization_list_params import TokenizationListParams as TokenizationListParams
from .auth_rule_remove_response import AuthRuleRemoveResponse as AuthRuleRemoveResponse
from .card_get_embed_url_params import CardGetEmbedURLParams as CardGetEmbedURLParams
from .card_search_by_pan_params import CardSearchByPanParams as CardSearchByPanParams
@@ -82,6 +83,7 @@
from .account_holder_resubmit_params import AccountHolderResubmitParams as AccountHolderResubmitParams
from .account_holder_update_response import AccountHolderUpdateResponse as AccountHolderUpdateResponse
from .payment_simulate_return_params import PaymentSimulateReturnParams as PaymentSimulateReturnParams
+from .tokenization_retrieve_response import TokenizationRetrieveResponse as TokenizationRetrieveResponse
from .tokenization_simulate_response import TokenizationSimulateResponse as TokenizationSimulateResponse
from .financial_account_create_params import FinancialAccountCreateParams as FinancialAccountCreateParams
from .financial_account_update_params import FinancialAccountUpdateParams as FinancialAccountUpdateParams
diff --git a/src/lithic/types/tokenization.py b/src/lithic/types/tokenization.py
index e0229a73..f2511e31 100644
--- a/src/lithic/types/tokenization.py
+++ b/src/lithic/types/tokenization.py
@@ -1,20 +1,53 @@
# File generated from our OpenAPI spec by Stainless.
+from typing import List, Optional
from datetime import datetime
from typing_extensions import Literal
from .._models import BaseModel
-__all__ = ["Tokenization"]
+__all__ = ["Tokenization", "Event"]
+
+
+class Event(BaseModel):
+ token: Optional[str] = None
+ """Globally unique identifier for a Tokenization Event"""
+
+ created_at: Optional[datetime] = None
+ """Date and time when the tokenization event first occurred. UTC time zone."""
+
+ result: Optional[
+ Literal[
+ "APPROVED",
+ "DECLINED",
+ "NOTIFICATION_DELIVERED",
+ "REQUIRE_ADDITIONAL_AUTHENTICATION",
+ "TOKEN_ACTIVATED",
+ "TOKEN_CREATED",
+ "TOKEN_DEACTIVATED",
+ "TOKEN_INACTIVE",
+ "TOKEN_STATE_UNKNOWN",
+ "TOKEN_SUSPENDED",
+ "TOKEN_UPDATED",
+ ]
+ ] = None
+ """Enum representing the result of the tokenization event"""
+
+ type: Optional[
+ Literal[
+ "TOKENIZATION_2FA",
+ "TOKENIZATION_AUTHORIZATION",
+ "TOKENIZATION_DECISIONING",
+ "TOKENIZATION_ELIGIBILITY_CHECK",
+ "TOKENIZATION_UPDATED",
+ ]
+ ] = None
+ """Enum representing the type of tokenization event that occurred"""
class Tokenization(BaseModel):
token: str
- """
- A fixed-width 23-digit numeric identifier for the Transaction that may be set if
- the transaction originated from the Mastercard network. This number may be used
- for dispute tracking.
- """
+ """Globally unique identifier for a Tokenization"""
account_token: str
"""The account token associated with the card being tokenized."""
@@ -25,10 +58,20 @@ class Tokenization(BaseModel):
created_at: datetime
"""Date and time when the tokenization first occurred. UTC time zone."""
- status: Literal["APPROVED", "DECLINED", "REQUIRE_ADDITIONAL_AUTHENTICATION"]
+ status: Literal["ACTIVE", "DEACTIVATED", "INACTIVE", "PAUSED", "PENDING_2FA", "PENDING_ACTIVATION", "UNKNOWN"]
"""The status of the tokenization request"""
- token_requestor_name: Literal["APPLE_PAY", "GOOGLE", "SAMSUNG_PAY"]
+ token_requestor_name: Literal[
+ "AMAZON_ONE",
+ "ANDROID_PAY",
+ "APPLE_PAY",
+ "FITBIT_PAY",
+ "GARMIN_PAY",
+ "MICROSOFT_PAY",
+ "SAMSUNG_PAY",
+ "UNKNOWN",
+ "VISA_CHECKOUT",
+ ]
+    """The entity that requested the tokenization. Represents a Digital Wallet."""
token_unique_reference: str
@@ -36,3 +79,6 @@ class Tokenization(BaseModel):
updated_at: datetime
"""Latest date and time when the tokenization was updated. UTC time zone."""
+
+ events: Optional[List[Event]] = None
+ """A list of events related to the tokenization."""
diff --git a/src/lithic/types/tokenization_list_params.py b/src/lithic/types/tokenization_list_params.py
new file mode 100644
index 00000000..41901c2b
--- /dev/null
+++ b/src/lithic/types/tokenization_list_params.py
@@ -0,0 +1,41 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from __future__ import annotations
+
+from typing import Union
+from datetime import date
+from typing_extensions import Annotated, TypedDict
+
+from .._utils import PropertyInfo
+
+__all__ = ["TokenizationListParams"]
+
+
+class TokenizationListParams(TypedDict, total=False):
+ account_token: str
+ """Filters for tokenizations associated with a specific account."""
+
+ begin: Annotated[Union[str, date], PropertyInfo(format="iso8601")]
+ """Filter for tokenizations created after this date."""
+
+ card_token: str
+ """Filters for tokenizations associated with a specific card."""
+
+ end: Annotated[Union[str, date], PropertyInfo(format="iso8601")]
+ """Filter for tokenizations created before this date."""
+
+ ending_before: str
+ """A cursor representing an item's token before which a page of results should end.
+
+ Used to retrieve the previous page of results before this item.
+ """
+
+ page_size: int
+ """Page size (for pagination)."""
+
+ starting_after: str
+ """A cursor representing an item's token after which a page of results should
+ begin.
+
+ Used to retrieve the next page of results after this item.
+ """
diff --git a/src/lithic/types/tokenization_retrieve_response.py b/src/lithic/types/tokenization_retrieve_response.py
new file mode 100644
index 00000000..e0bc3a55
--- /dev/null
+++ b/src/lithic/types/tokenization_retrieve_response.py
@@ -0,0 +1,12 @@
+# File generated from our OpenAPI spec by Stainless.
+
+from typing import Optional
+
+from .._models import BaseModel
+from .tokenization import Tokenization
+
+__all__ = ["TokenizationRetrieveResponse"]
+
+
+class TokenizationRetrieveResponse(BaseModel):
+ data: Optional[Tokenization] = None
diff --git a/tests/api_resources/test_tokenizations.py b/tests/api_resources/test_tokenizations.py
index 314ca183..1d48cc9a 100644
--- a/tests/api_resources/test_tokenizations.py
+++ b/tests/api_resources/test_tokenizations.py
@@ -9,7 +9,13 @@
from lithic import Lithic, AsyncLithic
from tests.utils import assert_matches_type
-from lithic.types import TokenizationSimulateResponse
+from lithic.types import (
+ Tokenization,
+ TokenizationRetrieveResponse,
+ TokenizationSimulateResponse,
+)
+from lithic._utils import parse_date
+from lithic.pagination import SyncCursorPage, AsyncCursorPage
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -17,6 +23,82 @@
class TestTokenizations:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
+ @parametrize
+ def test_method_retrieve(self, client: Lithic) -> None:
+ tokenization = client.tokenizations.retrieve(
+ "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ )
+ assert_matches_type(TokenizationRetrieveResponse, tokenization, path=["response"])
+
+ @parametrize
+ def test_raw_response_retrieve(self, client: Lithic) -> None:
+ response = client.tokenizations.with_raw_response.retrieve(
+ "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ tokenization = response.parse()
+ assert_matches_type(TokenizationRetrieveResponse, tokenization, path=["response"])
+
+ @parametrize
+ def test_streaming_response_retrieve(self, client: Lithic) -> None:
+ with client.tokenizations.with_streaming_response.retrieve(
+ "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ tokenization = response.parse()
+ assert_matches_type(TokenizationRetrieveResponse, tokenization, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_retrieve(self, client: Lithic) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `tokenization_token` but received ''"):
+ client.tokenizations.with_raw_response.retrieve(
+ "",
+ )
+
+ @parametrize
+ def test_method_list(self, client: Lithic) -> None:
+ tokenization = client.tokenizations.list()
+ assert_matches_type(SyncCursorPage[Tokenization], tokenization, path=["response"])
+
+ @parametrize
+ def test_method_list_with_all_params(self, client: Lithic) -> None:
+ tokenization = client.tokenizations.list(
+ account_token="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ begin=parse_date("2019-12-27"),
+ card_token="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ end=parse_date("2019-12-27"),
+ ending_before="string",
+ page_size=1,
+ starting_after="string",
+ )
+ assert_matches_type(SyncCursorPage[Tokenization], tokenization, path=["response"])
+
+ @parametrize
+ def test_raw_response_list(self, client: Lithic) -> None:
+ response = client.tokenizations.with_raw_response.list()
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ tokenization = response.parse()
+ assert_matches_type(SyncCursorPage[Tokenization], tokenization, path=["response"])
+
+ @parametrize
+ def test_streaming_response_list(self, client: Lithic) -> None:
+ with client.tokenizations.with_streaming_response.list() as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ tokenization = response.parse()
+ assert_matches_type(SyncCursorPage[Tokenization], tokenization, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
@parametrize
def test_method_simulate(self, client: Lithic) -> None:
tokenization = client.tokenizations.simulate(
@@ -74,6 +156,82 @@ def test_streaming_response_simulate(self, client: Lithic) -> None:
class TestAsyncTokenizations:
parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
+ @parametrize
+ async def test_method_retrieve(self, async_client: AsyncLithic) -> None:
+ tokenization = await async_client.tokenizations.retrieve(
+ "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ )
+ assert_matches_type(TokenizationRetrieveResponse, tokenization, path=["response"])
+
+ @parametrize
+ async def test_raw_response_retrieve(self, async_client: AsyncLithic) -> None:
+ response = await async_client.tokenizations.with_raw_response.retrieve(
+ "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ tokenization = response.parse()
+ assert_matches_type(TokenizationRetrieveResponse, tokenization, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_retrieve(self, async_client: AsyncLithic) -> None:
+ async with async_client.tokenizations.with_streaming_response.retrieve(
+ "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ tokenization = await response.parse()
+ assert_matches_type(TokenizationRetrieveResponse, tokenization, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_retrieve(self, async_client: AsyncLithic) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `tokenization_token` but received ''"):
+ await async_client.tokenizations.with_raw_response.retrieve(
+ "",
+ )
+
+ @parametrize
+ async def test_method_list(self, async_client: AsyncLithic) -> None:
+ tokenization = await async_client.tokenizations.list()
+ assert_matches_type(AsyncCursorPage[Tokenization], tokenization, path=["response"])
+
+ @parametrize
+ async def test_method_list_with_all_params(self, async_client: AsyncLithic) -> None:
+ tokenization = await async_client.tokenizations.list(
+ account_token="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ begin=parse_date("2019-12-27"),
+ card_token="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ end=parse_date("2019-12-27"),
+ ending_before="string",
+ page_size=1,
+ starting_after="string",
+ )
+ assert_matches_type(AsyncCursorPage[Tokenization], tokenization, path=["response"])
+
+ @parametrize
+ async def test_raw_response_list(self, async_client: AsyncLithic) -> None:
+ response = await async_client.tokenizations.with_raw_response.list()
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ tokenization = response.parse()
+ assert_matches_type(AsyncCursorPage[Tokenization], tokenization, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_list(self, async_client: AsyncLithic) -> None:
+ async with async_client.tokenizations.with_streaming_response.list() as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ tokenization = await response.parse()
+ assert_matches_type(AsyncCursorPage[Tokenization], tokenization, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
@parametrize
async def test_method_simulate(self, async_client: AsyncLithic) -> None:
tokenization = await async_client.tokenizations.simulate(