From 85bc8eb26afdfd7deb28ce2198eb3ef02181b95f Mon Sep 17 00:00:00 2001 From: Ben Batha Date: Mon, 8 Sep 2025 16:49:51 -0400 Subject: [PATCH 01/11] feat: normalize user agent with other do clients --- src/gradient/_base_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/gradient/_base_client.py b/src/gradient/_base_client.py index 74f3c57a..6f2db396 100644 --- a/src/gradient/_base_client.py +++ b/src/gradient/_base_client.py @@ -671,7 +671,7 @@ def _validate_headers( @property def user_agent(self) -> str: - return f"{self.__class__.__name__}/Python {self._version}" + return f"{self.__class__.__name__}/Python/{self._version}" @property def base_url(self) -> URL: From 5a6aa9241b5e7c2f4319caa14d62f41c0c824f9e Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 8 Sep 2025 20:10:08 +0000 Subject: [PATCH 02/11] feat(types): replace List[str] with SequenceNotStr in params --- src/gradient/_utils/_transform.py | 6 +++ src/gradient/resources/agents/agents.py | 16 +++---- .../resources/agents/chat/completions.py | 28 +++++------ .../evaluation_metrics/workspaces/agents.py | 8 ++-- .../workspaces/workspaces.py | 8 ++-- .../resources/agents/evaluation_runs.py | 8 ++-- .../resources/agents/evaluation_test_cases.py | 8 ++-- src/gradient/resources/chat/completions.py | 28 +++++------ .../destroy_with_associated_resources.py | 24 +++++----- .../resources/gpu_droplets/firewalls/tags.py | 12 ++--- .../resources/gpu_droplets/gpu_droplets.py | 48 +++++++++---------- .../resources/gpu_droplets/images/images.py | 8 ++-- .../load_balancers/load_balancers.py | 28 +++++------ .../resources/gpu_droplets/volumes/actions.py | 20 ++++---- .../gpu_droplets/volumes/snapshots.py | 8 ++-- .../resources/gpu_droplets/volumes/volumes.py | 16 +++---- .../knowledge_bases/indexing_jobs.py | 8 ++-- .../knowledge_bases/knowledge_bases.py | 12 ++--- src/gradient/types/agent_create_params.py | 6 +-- src/gradient/types/agent_update_params.py | 4 +- .../agents/chat/completion_create_params.py | 14 +++--- .../workspace_create_params.py | 5 +- .../workspaces/agent_move_params.py | 4 +- .../agents/evaluation_run_create_params.py | 5 +- .../evaluation_test_case_create_params.py | 4 +- .../evaluation_test_case_update_params.py | 4 +- .../types/chat/completion_create_params.py | 14 +++--- .../types/gpu_droplet_create_params.py | 17 +++---- .../autoscale_pool_droplet_template_param.py | 7 +-- ...ciated_resource_delete_selective_params.py | 13 ++--- .../types/gpu_droplets/firewall_param.py | 5 +- .../gpu_droplets/firewalls/tag_add_params.py | 6 ++- .../firewalls/tag_remove_params.py | 6 ++- .../types/gpu_droplets/image_create_params.py | 6 ++- .../types/gpu_droplets/lb_firewall_param.py | 7 +-- .../load_balancer_create_params.py | 7 +-- .../load_balancer_update_params.py | 7 +-- .../gpu_droplets/volume_create_params.py | 8 ++-- .../volumes/action_initiate_by_id_params.py | 6 ++- .../volumes/action_initiate_by_name_params.py | 6 ++- .../volumes/snapshot_create_params.py | 6 ++- .../types/knowledge_base_create_params.py | 5 +- .../types/knowledge_base_update_params.py | 4 +- .../indexing_job_create_params.py | 5 +- .../shared_params/firewall_rule_target.py | 12 +++-- 45 files changed, 255 insertions(+), 232 deletions(-) diff --git a/src/gradient/_utils/_transform.py b/src/gradient/_utils/_transform.py index b0cc20a7..f0bcefd4 100644 --- a/src/gradient/_utils/_transform.py +++ b/src/gradient/_utils/_transform.py @@ -16,6 +16,7 @@ lru_cache, 
is_mapping, is_iterable, + is_sequence, ) from .._files import is_base64_file_input from ._typing import ( @@ -24,6 +25,7 @@ extract_type_arg, is_iterable_type, is_required_type, + is_sequence_type, is_annotated_type, strip_annotated_type, ) @@ -184,6 +186,8 @@ def _transform_recursive( (is_list_type(stripped_type) and is_list(data)) # Iterable[T] or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) + # Sequence[T] + or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str)) ): # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually # intended as an iterable, so we don't transform it. @@ -346,6 +350,8 @@ async def _async_transform_recursive( (is_list_type(stripped_type) and is_list(data)) # Iterable[T] or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) + # Sequence[T] + or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str)) ): # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually # intended as an iterable, so we don't transform it. diff --git a/src/gradient/resources/agents/agents.py b/src/gradient/resources/agents/agents.py index 67f7f4ae..8d06584c 100644 --- a/src/gradient/resources/agents/agents.py +++ b/src/gradient/resources/agents/agents.py @@ -2,8 +2,6 @@ from __future__ import annotations -from typing import List - import httpx from .routes import ( @@ -22,7 +20,7 @@ agent_update_params, agent_update_status_params, ) -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ..._utils import maybe_transform, async_maybe_transform from .api_keys import ( APIKeysResource, @@ -183,13 +181,13 @@ def create( anthropic_key_uuid: str | NotGiven = NOT_GIVEN, description: str | NotGiven = NOT_GIVEN, instruction: str | NotGiven = NOT_GIVEN, - knowledge_base_uuid: List[str] | NotGiven = NOT_GIVEN, + knowledge_base_uuid: SequenceNotStr[str] | NotGiven = NOT_GIVEN, model_uuid: str | NotGiven = NOT_GIVEN, name: str | NotGiven = NOT_GIVEN, openai_key_uuid: str | NotGiven = NOT_GIVEN, project_id: str | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - tags: List[str] | NotGiven = NOT_GIVEN, + tags: SequenceNotStr[str] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. 
extra_headers: Headers | None = None, @@ -313,7 +311,7 @@ def update( project_id: str | NotGiven = NOT_GIVEN, provide_citations: bool | NotGiven = NOT_GIVEN, retrieval_method: APIRetrievalMethod | NotGiven = NOT_GIVEN, - tags: List[str] | NotGiven = NOT_GIVEN, + tags: SequenceNotStr[str] | NotGiven = NOT_GIVEN, temperature: float | NotGiven = NOT_GIVEN, top_p: float | NotGiven = NOT_GIVEN, body_uuid: str | NotGiven = NOT_GIVEN, @@ -626,13 +624,13 @@ async def create( anthropic_key_uuid: str | NotGiven = NOT_GIVEN, description: str | NotGiven = NOT_GIVEN, instruction: str | NotGiven = NOT_GIVEN, - knowledge_base_uuid: List[str] | NotGiven = NOT_GIVEN, + knowledge_base_uuid: SequenceNotStr[str] | NotGiven = NOT_GIVEN, model_uuid: str | NotGiven = NOT_GIVEN, name: str | NotGiven = NOT_GIVEN, openai_key_uuid: str | NotGiven = NOT_GIVEN, project_id: str | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - tags: List[str] | NotGiven = NOT_GIVEN, + tags: SequenceNotStr[str] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -756,7 +754,7 @@ async def update( project_id: str | NotGiven = NOT_GIVEN, provide_citations: bool | NotGiven = NOT_GIVEN, retrieval_method: APIRetrievalMethod | NotGiven = NOT_GIVEN, - tags: List[str] | NotGiven = NOT_GIVEN, + tags: SequenceNotStr[str] | NotGiven = NOT_GIVEN, temperature: float | NotGiven = NOT_GIVEN, top_p: float | NotGiven = NOT_GIVEN, body_uuid: str | NotGiven = NOT_GIVEN, diff --git a/src/gradient/resources/agents/chat/completions.py b/src/gradient/resources/agents/chat/completions.py index 88d6c241..fb4523c1 100644 --- a/src/gradient/resources/agents/chat/completions.py +++ b/src/gradient/resources/agents/chat/completions.py @@ -2,12 +2,12 @@ from __future__ import annotations -from typing import Dict, List, Union, Iterable, Optional +from typing import Dict, Union, Iterable, Optional from typing_extensions import Literal, overload import httpx -from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ...._utils import required_args, maybe_transform, async_maybe_transform from ...._compat import cached_property from ...._resource import SyncAPIResource, AsyncAPIResource @@ -60,7 +60,7 @@ def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, @@ -189,8 +189,8 @@ def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, + stream_options: Optional[completion_create_params.StreamOptions] | NotGiven = NOT_GIVEN, 
temperature: Optional[float] | NotGiven = NOT_GIVEN, tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN, tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN, @@ -317,8 +317,8 @@ def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, + stream_options: Optional[completion_create_params.StreamOptions] | NotGiven = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN, tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN, @@ -447,7 +447,7 @@ def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, @@ -549,7 +549,7 @@ async def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, @@ -678,8 +678,8 @@ async def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, + stream_options: Optional[completion_create_params.StreamOptions] | NotGiven = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN, tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN, @@ -806,8 +806,8 @@ async def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, + stream_options: Optional[completion_create_params.StreamOptions] | NotGiven = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN, tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN, @@ -933,7 +933,7 @@ async def create( metadata: Optional[Dict[str, str]] | 
NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, diff --git a/src/gradient/resources/agents/evaluation_metrics/workspaces/agents.py b/src/gradient/resources/agents/evaluation_metrics/workspaces/agents.py index 1a73bc60..408396b1 100644 --- a/src/gradient/resources/agents/evaluation_metrics/workspaces/agents.py +++ b/src/gradient/resources/agents/evaluation_metrics/workspaces/agents.py @@ -2,11 +2,9 @@ from __future__ import annotations -from typing import List - import httpx -from ....._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ....._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ....._utils import maybe_transform, async_maybe_transform from ....._compat import cached_property from ....._resource import SyncAPIResource, AsyncAPIResource @@ -104,7 +102,7 @@ def move( self, path_workspace_uuid: str, *, - agent_uuids: List[str] | NotGiven = NOT_GIVEN, + agent_uuids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, body_workspace_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -232,7 +230,7 @@ async def move( self, path_workspace_uuid: str, *, - agent_uuids: List[str] | NotGiven = NOT_GIVEN, + agent_uuids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, body_workspace_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. diff --git a/src/gradient/resources/agents/evaluation_metrics/workspaces/workspaces.py b/src/gradient/resources/agents/evaluation_metrics/workspaces/workspaces.py index a2cf5ebc..e6f610ef 100644 --- a/src/gradient/resources/agents/evaluation_metrics/workspaces/workspaces.py +++ b/src/gradient/resources/agents/evaluation_metrics/workspaces/workspaces.py @@ -2,8 +2,6 @@ from __future__ import annotations -from typing import List - import httpx from .agents import ( @@ -14,7 +12,7 @@ AgentsResourceWithStreamingResponse, AsyncAgentsResourceWithStreamingResponse, ) -from ....._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ....._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ....._utils import maybe_transform, async_maybe_transform from ....._compat import cached_property from ....._resource import SyncAPIResource, AsyncAPIResource @@ -65,7 +63,7 @@ def with_streaming_response(self) -> WorkspacesResourceWithStreamingResponse: def create( self, *, - agent_uuids: List[str] | NotGiven = NOT_GIVEN, + agent_uuids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, description: str | NotGiven = NOT_GIVEN, name: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
@@ -327,7 +325,7 @@ def with_streaming_response(self) -> AsyncWorkspacesResourceWithStreamingRespons async def create( self, *, - agent_uuids: List[str] | NotGiven = NOT_GIVEN, + agent_uuids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, description: str | NotGiven = NOT_GIVEN, name: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. diff --git a/src/gradient/resources/agents/evaluation_runs.py b/src/gradient/resources/agents/evaluation_runs.py index e55cc275..e00c9eb3 100644 --- a/src/gradient/resources/agents/evaluation_runs.py +++ b/src/gradient/resources/agents/evaluation_runs.py @@ -2,11 +2,9 @@ from __future__ import annotations -from typing import List - import httpx -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ..._utils import maybe_transform, async_maybe_transform from ..._compat import cached_property from ..._resource import SyncAPIResource, AsyncAPIResource @@ -49,7 +47,7 @@ def with_streaming_response(self) -> EvaluationRunsResourceWithStreamingResponse def create( self, *, - agent_uuids: List[str] | NotGiven = NOT_GIVEN, + agent_uuids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, run_name: str | NotGiven = NOT_GIVEN, test_case_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. @@ -251,7 +249,7 @@ def with_streaming_response(self) -> AsyncEvaluationRunsResourceWithStreamingRes async def create( self, *, - agent_uuids: List[str] | NotGiven = NOT_GIVEN, + agent_uuids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, run_name: str | NotGiven = NOT_GIVEN, test_case_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
diff --git a/src/gradient/resources/agents/evaluation_test_cases.py b/src/gradient/resources/agents/evaluation_test_cases.py index 454576c8..07f0a251 100644 --- a/src/gradient/resources/agents/evaluation_test_cases.py +++ b/src/gradient/resources/agents/evaluation_test_cases.py @@ -2,11 +2,9 @@ from __future__ import annotations -from typing import List - import httpx -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ..._utils import maybe_transform, async_maybe_transform from ..._compat import cached_property from ..._resource import SyncAPIResource, AsyncAPIResource @@ -60,7 +58,7 @@ def create( *, dataset_uuid: str | NotGiven = NOT_GIVEN, description: str | NotGiven = NOT_GIVEN, - metrics: List[str] | NotGiven = NOT_GIVEN, + metrics: SequenceNotStr[str] | NotGiven = NOT_GIVEN, name: str | NotGiven = NOT_GIVEN, star_metric: APIStarMetricParam | NotGiven = NOT_GIVEN, workspace_uuid: str | NotGiven = NOT_GIVEN, @@ -322,7 +320,7 @@ async def create( *, dataset_uuid: str | NotGiven = NOT_GIVEN, description: str | NotGiven = NOT_GIVEN, - metrics: List[str] | NotGiven = NOT_GIVEN, + metrics: SequenceNotStr[str] | NotGiven = NOT_GIVEN, name: str | NotGiven = NOT_GIVEN, star_metric: APIStarMetricParam | NotGiven = NOT_GIVEN, workspace_uuid: str | NotGiven = NOT_GIVEN, diff --git a/src/gradient/resources/chat/completions.py b/src/gradient/resources/chat/completions.py index 3a412b10..3017deb6 100644 --- a/src/gradient/resources/chat/completions.py +++ b/src/gradient/resources/chat/completions.py @@ -2,12 +2,12 @@ from __future__ import annotations -from typing import Dict, List, Union, Iterable, Optional +from typing import Dict, Union, Iterable, Optional from typing_extensions import Literal, overload import httpx -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ..._utils import required_args, maybe_transform, async_maybe_transform from ..._compat import cached_property from ..._resource import SyncAPIResource, AsyncAPIResource @@ -60,7 +60,7 @@ def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, @@ -190,8 +190,8 @@ def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, + stream_options: Optional[completion_create_params.StreamOptions] | NotGiven = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN, tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN, @@ -318,8 +318,8 @@ def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, 
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, + stream_options: Optional[completion_create_params.StreamOptions] | NotGiven = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN, tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN, @@ -445,7 +445,7 @@ def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, @@ -550,7 +550,7 @@ async def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, @@ -680,8 +680,8 @@ async def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, + stream_options: Optional[completion_create_params.StreamOptions] | NotGiven = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN, tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN, @@ -808,8 +808,8 @@ async def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, + stream_options: Optional[completion_create_params.StreamOptions] | NotGiven = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN, tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN, @@ -935,7 +935,7 @@ async def create( metadata: Optional[Dict[str, str]] | NotGiven = NOT_GIVEN, n: Optional[int] | NotGiven = NOT_GIVEN, presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, + stop: Union[Optional[str], SequenceNotStr[str], None] | NotGiven = NOT_GIVEN, stream: Optional[Literal[False]] | Literal[True] | NotGiven = 
NOT_GIVEN, stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, diff --git a/src/gradient/resources/gpu_droplets/destroy_with_associated_resources.py b/src/gradient/resources/gpu_droplets/destroy_with_associated_resources.py index 2f3b90cf..0d55cb48 100644 --- a/src/gradient/resources/gpu_droplets/destroy_with_associated_resources.py +++ b/src/gradient/resources/gpu_droplets/destroy_with_associated_resources.py @@ -2,11 +2,9 @@ from __future__ import annotations -from typing import List - import httpx -from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven +from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven, SequenceNotStr from ..._utils import maybe_transform, async_maybe_transform from ..._compat import cached_property from ..._resource import SyncAPIResource, AsyncAPIResource @@ -174,11 +172,11 @@ def delete_selective( self, droplet_id: int, *, - floating_ips: List[str] | NotGiven = NOT_GIVEN, - reserved_ips: List[str] | NotGiven = NOT_GIVEN, - snapshots: List[str] | NotGiven = NOT_GIVEN, - volume_snapshots: List[str] | NotGiven = NOT_GIVEN, - volumes: List[str] | NotGiven = NOT_GIVEN, + floating_ips: SequenceNotStr[str] | NotGiven = NOT_GIVEN, + reserved_ips: SequenceNotStr[str] | NotGiven = NOT_GIVEN, + snapshots: SequenceNotStr[str] | NotGiven = NOT_GIVEN, + volume_snapshots: SequenceNotStr[str] | NotGiven = NOT_GIVEN, + volumes: SequenceNotStr[str] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -430,11 +428,11 @@ async def delete_selective( self, droplet_id: int, *, - floating_ips: List[str] | NotGiven = NOT_GIVEN, - reserved_ips: List[str] | NotGiven = NOT_GIVEN, - snapshots: List[str] | NotGiven = NOT_GIVEN, - volume_snapshots: List[str] | NotGiven = NOT_GIVEN, - volumes: List[str] | NotGiven = NOT_GIVEN, + floating_ips: SequenceNotStr[str] | NotGiven = NOT_GIVEN, + reserved_ips: SequenceNotStr[str] | NotGiven = NOT_GIVEN, + snapshots: SequenceNotStr[str] | NotGiven = NOT_GIVEN, + volume_snapshots: SequenceNotStr[str] | NotGiven = NOT_GIVEN, + volumes: SequenceNotStr[str] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. 
extra_headers: Headers | None = None, diff --git a/src/gradient/resources/gpu_droplets/firewalls/tags.py b/src/gradient/resources/gpu_droplets/firewalls/tags.py index dc66c72f..ee13acd5 100644 --- a/src/gradient/resources/gpu_droplets/firewalls/tags.py +++ b/src/gradient/resources/gpu_droplets/firewalls/tags.py @@ -2,11 +2,11 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional import httpx -from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven +from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven, SequenceNotStr from ...._utils import maybe_transform, async_maybe_transform from ...._compat import cached_property from ...._resource import SyncAPIResource, AsyncAPIResource @@ -46,7 +46,7 @@ def add( self, firewall_id: str, *, - tags: Optional[List[str]], + tags: Optional[SequenceNotStr[str]], # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -95,7 +95,7 @@ def remove( self, firewall_id: str, *, - tags: Optional[List[str]], + tags: Optional[SequenceNotStr[str]], # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -165,7 +165,7 @@ async def add( self, firewall_id: str, *, - tags: Optional[List[str]], + tags: Optional[SequenceNotStr[str]], # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -214,7 +214,7 @@ async def remove( self, firewall_id: str, *, - tags: Optional[List[str]], + tags: Optional[SequenceNotStr[str]], # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. 
extra_headers: Headers | None = None, diff --git a/src/gradient/resources/gpu_droplets/gpu_droplets.py b/src/gradient/resources/gpu_droplets/gpu_droplets.py index 0ce55ba8..48a9e5fe 100644 --- a/src/gradient/resources/gpu_droplets/gpu_droplets.py +++ b/src/gradient/resources/gpu_droplets/gpu_droplets.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any, List, Union, Optional, cast +from typing import Any, Union, Optional, cast from typing_extensions import Literal, overload import httpx @@ -39,7 +39,7 @@ BackupsResourceWithStreamingResponse, AsyncBackupsResourceWithStreamingResponse, ) -from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven +from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven, SequenceNotStr from ..._utils import required_args, maybe_transform, async_maybe_transform from ..._compat import cached_property from .autoscale import ( @@ -215,10 +215,10 @@ def create( monitoring: bool | NotGiven = NOT_GIVEN, private_networking: bool | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - ssh_keys: List[Union[str, int]] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + ssh_keys: SequenceNotStr[Union[str, int]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, user_data: str | NotGiven = NOT_GIVEN, - volumes: List[str] | NotGiven = NOT_GIVEN, + volumes: SequenceNotStr[str] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, with_droplet_agent: bool | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. @@ -328,7 +328,7 @@ def create( self, *, image: Union[str, int], - names: List[str], + names: SequenceNotStr[str], size: str, backup_policy: DropletBackupPolicyParam | NotGiven = NOT_GIVEN, backups: bool | NotGiven = NOT_GIVEN, @@ -336,10 +336,10 @@ def create( monitoring: bool | NotGiven = NOT_GIVEN, private_networking: bool | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - ssh_keys: List[Union[str, int]] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + ssh_keys: SequenceNotStr[Union[str, int]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, user_data: str | NotGiven = NOT_GIVEN, - volumes: List[str] | NotGiven = NOT_GIVEN, + volumes: SequenceNotStr[str] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, with_droplet_agent: bool | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. @@ -458,13 +458,13 @@ def create( monitoring: bool | NotGiven = NOT_GIVEN, private_networking: bool | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - ssh_keys: List[Union[str, int]] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + ssh_keys: SequenceNotStr[Union[str, int]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, user_data: str | NotGiven = NOT_GIVEN, - volumes: List[str] | NotGiven = NOT_GIVEN, + volumes: SequenceNotStr[str] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, with_droplet_agent: bool | NotGiven = NOT_GIVEN, - names: List[str] | NotGiven = NOT_GIVEN, + names: SequenceNotStr[str] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
# The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -986,10 +986,10 @@ async def create( monitoring: bool | NotGiven = NOT_GIVEN, private_networking: bool | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - ssh_keys: List[Union[str, int]] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + ssh_keys: SequenceNotStr[Union[str, int]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, user_data: str | NotGiven = NOT_GIVEN, - volumes: List[str] | NotGiven = NOT_GIVEN, + volumes: SequenceNotStr[str] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, with_droplet_agent: bool | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. @@ -1099,7 +1099,7 @@ async def create( self, *, image: Union[str, int], - names: List[str], + names: SequenceNotStr[str], size: str, backup_policy: DropletBackupPolicyParam | NotGiven = NOT_GIVEN, backups: bool | NotGiven = NOT_GIVEN, @@ -1107,10 +1107,10 @@ async def create( monitoring: bool | NotGiven = NOT_GIVEN, private_networking: bool | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - ssh_keys: List[Union[str, int]] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + ssh_keys: SequenceNotStr[Union[str, int]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, user_data: str | NotGiven = NOT_GIVEN, - volumes: List[str] | NotGiven = NOT_GIVEN, + volumes: SequenceNotStr[str] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, with_droplet_agent: bool | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. @@ -1229,13 +1229,13 @@ async def create( monitoring: bool | NotGiven = NOT_GIVEN, private_networking: bool | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - ssh_keys: List[Union[str, int]] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + ssh_keys: SequenceNotStr[Union[str, int]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, user_data: str | NotGiven = NOT_GIVEN, - volumes: List[str] | NotGiven = NOT_GIVEN, + volumes: SequenceNotStr[str] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, with_droplet_agent: bool | NotGiven = NOT_GIVEN, - names: List[str] | NotGiven = NOT_GIVEN, + names: SequenceNotStr[str] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. 
extra_headers: Headers | None = None, diff --git a/src/gradient/resources/gpu_droplets/images/images.py b/src/gradient/resources/gpu_droplets/images/images.py index 09994263..1b00c024 100644 --- a/src/gradient/resources/gpu_droplets/images/images.py +++ b/src/gradient/resources/gpu_droplets/images/images.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import List, Union, Optional +from typing import Union, Optional from typing_extensions import Literal import httpx @@ -15,7 +15,7 @@ ActionsResourceWithStreamingResponse, AsyncActionsResourceWithStreamingResponse, ) -from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven +from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven, SequenceNotStr from ...._utils import maybe_transform, async_maybe_transform from ...._compat import cached_property from ...._resource import SyncAPIResource, AsyncAPIResource @@ -98,7 +98,7 @@ def create( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, url: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -464,7 +464,7 @@ async def create( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, url: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. 
diff --git a/src/gradient/resources/gpu_droplets/load_balancers/load_balancers.py b/src/gradient/resources/gpu_droplets/load_balancers/load_balancers.py index d876b50f..8f11a5da 100644 --- a/src/gradient/resources/gpu_droplets/load_balancers/load_balancers.py +++ b/src/gradient/resources/gpu_droplets/load_balancers/load_balancers.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import List, Iterable +from typing import Iterable from typing_extensions import Literal, overload import httpx @@ -15,7 +15,7 @@ DropletsResourceWithStreamingResponse, AsyncDropletsResourceWithStreamingResponse, ) -from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven +from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven, SequenceNotStr from ...._utils import required_args, maybe_transform, async_maybe_transform from ...._compat import cached_property from ...._resource import SyncAPIResource, AsyncAPIResource @@ -122,7 +122,7 @@ def create( size: Literal["lb-small", "lb-medium", "lb-large"] | NotGiven = NOT_GIVEN, size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -281,7 +281,7 @@ def create( size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, tag: str | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -441,7 +441,7 @@ def create( size: Literal["lb-small", "lb-medium", "lb-large"] | NotGiven = NOT_GIVEN, size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -571,7 +571,7 @@ def update( size: Literal["lb-small", "lb-medium", "lb-large"] | NotGiven = NOT_GIVEN, size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -725,7 +725,7 @@ def update( size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, tag: str | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -880,7 +880,7 @@ def 
update( size: Literal["lb-small", "lb-medium", "lb-large"] | NotGiven = NOT_GIVEN, size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -1134,7 +1134,7 @@ async def create( size: Literal["lb-small", "lb-medium", "lb-large"] | NotGiven = NOT_GIVEN, size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -1293,7 +1293,7 @@ async def create( size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, tag: str | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -1453,7 +1453,7 @@ async def create( size: Literal["lb-small", "lb-medium", "lb-large"] | NotGiven = NOT_GIVEN, size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -1583,7 +1583,7 @@ async def update( size: Literal["lb-small", "lb-medium", "lb-large"] | NotGiven = NOT_GIVEN, size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -1737,7 +1737,7 @@ async def update( size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, tag: str | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, @@ -1892,7 +1892,7 @@ async def update( size: Literal["lb-small", "lb-medium", "lb-large"] | NotGiven = NOT_GIVEN, size_unit: int | NotGiven = NOT_GIVEN, sticky_sessions: StickySessionsParam | NotGiven = NOT_GIVEN, - target_load_balancer_ids: List[str] | NotGiven = NOT_GIVEN, + target_load_balancer_ids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, tls_cipher_policy: Literal["DEFAULT", "STRONG"] | NotGiven = NOT_GIVEN, type: 
Literal["REGIONAL", "REGIONAL_NETWORK", "GLOBAL"] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, diff --git a/src/gradient/resources/gpu_droplets/volumes/actions.py b/src/gradient/resources/gpu_droplets/volumes/actions.py index 2e093136..c648beaa 100644 --- a/src/gradient/resources/gpu_droplets/volumes/actions.py +++ b/src/gradient/resources/gpu_droplets/volumes/actions.py @@ -2,12 +2,12 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional from typing_extensions import Literal, overload import httpx -from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ...._utils import required_args, maybe_transform, async_maybe_transform from ...._compat import cached_property from ...._resource import SyncAPIResource, AsyncAPIResource @@ -184,7 +184,7 @@ def initiate_by_id( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -474,7 +474,7 @@ def initiate_by_id( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, size_gigabytes: int | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -541,7 +541,7 @@ def initiate_by_name( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -719,7 +719,7 @@ def initiate_by_name( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -909,7 +909,7 @@ async def initiate_by_id( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -1199,7 +1199,7 @@ async def initiate_by_id( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, size_gigabytes: int | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
# The extra values given here take precedence over values defined on the client or passed to this method. @@ -1266,7 +1266,7 @@ async def initiate_by_name( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -1444,7 +1444,7 @@ async def initiate_by_name( "syd1", ] | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, diff --git a/src/gradient/resources/gpu_droplets/volumes/snapshots.py b/src/gradient/resources/gpu_droplets/volumes/snapshots.py index 0f9e30fa..7e925264 100644 --- a/src/gradient/resources/gpu_droplets/volumes/snapshots.py +++ b/src/gradient/resources/gpu_droplets/volumes/snapshots.py @@ -2,11 +2,11 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional import httpx -from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven +from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven, SequenceNotStr from ...._utils import maybe_transform, async_maybe_transform from ...._compat import cached_property from ...._resource import SyncAPIResource, AsyncAPIResource @@ -50,7 +50,7 @@ def create( volume_id: str, *, name: str, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -251,7 +251,7 @@ async def create( volume_id: str, *, name: str, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. 
extra_headers: Headers | None = None, diff --git a/src/gradient/resources/gpu_droplets/volumes/volumes.py b/src/gradient/resources/gpu_droplets/volumes/volumes.py index ada4aedf..ee980a10 100644 --- a/src/gradient/resources/gpu_droplets/volumes/volumes.py +++ b/src/gradient/resources/gpu_droplets/volumes/volumes.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional from typing_extensions import Literal, overload import httpx @@ -15,7 +15,7 @@ ActionsResourceWithStreamingResponse, AsyncActionsResourceWithStreamingResponse, ) -from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven +from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven, SequenceNotStr from ...._utils import required_args, maybe_transform, async_maybe_transform from .snapshots import ( SnapshotsResource, @@ -97,7 +97,7 @@ def create( filesystem_label: str | NotGiven = NOT_GIVEN, filesystem_type: str | NotGiven = NOT_GIVEN, snapshot_id: str | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -182,7 +182,7 @@ def create( filesystem_label: str | NotGiven = NOT_GIVEN, filesystem_type: str | NotGiven = NOT_GIVEN, snapshot_id: str | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -267,7 +267,7 @@ def create( filesystem_label: str | NotGiven = NOT_GIVEN, filesystem_type: str | NotGiven = NOT_GIVEN, snapshot_id: str | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -590,7 +590,7 @@ async def create( filesystem_label: str | NotGiven = NOT_GIVEN, filesystem_type: str | NotGiven = NOT_GIVEN, snapshot_id: str | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -675,7 +675,7 @@ async def create( filesystem_label: str | NotGiven = NOT_GIVEN, filesystem_type: str | NotGiven = NOT_GIVEN, snapshot_id: str | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. 
extra_headers: Headers | None = None, @@ -760,7 +760,7 @@ async def create( filesystem_label: str | NotGiven = NOT_GIVEN, filesystem_type: str | NotGiven = NOT_GIVEN, snapshot_id: str | NotGiven = NOT_GIVEN, - tags: Optional[List[str]] | NotGiven = NOT_GIVEN, + tags: Optional[SequenceNotStr[str]] | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, diff --git a/src/gradient/resources/knowledge_bases/indexing_jobs.py b/src/gradient/resources/knowledge_bases/indexing_jobs.py index 723b42f5..41e7da76 100644 --- a/src/gradient/resources/knowledge_bases/indexing_jobs.py +++ b/src/gradient/resources/knowledge_bases/indexing_jobs.py @@ -2,11 +2,9 @@ from __future__ import annotations -from typing import List - import httpx -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ..._utils import maybe_transform, async_maybe_transform from ..._compat import cached_property from ..._resource import SyncAPIResource, AsyncAPIResource @@ -54,7 +52,7 @@ def with_streaming_response(self) -> IndexingJobsResourceWithStreamingResponse: def create( self, *, - data_source_uuids: List[str] | NotGiven = NOT_GIVEN, + data_source_uuids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, knowledge_base_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -285,7 +283,7 @@ def with_streaming_response(self) -> AsyncIndexingJobsResourceWithStreamingRespo async def create( self, *, - data_source_uuids: List[str] | NotGiven = NOT_GIVEN, + data_source_uuids: SequenceNotStr[str] | NotGiven = NOT_GIVEN, knowledge_base_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. diff --git a/src/gradient/resources/knowledge_bases/knowledge_bases.py b/src/gradient/resources/knowledge_bases/knowledge_bases.py index 594b2ba7..61fc85a8 100644 --- a/src/gradient/resources/knowledge_bases/knowledge_bases.py +++ b/src/gradient/resources/knowledge_bases/knowledge_bases.py @@ -2,12 +2,12 @@ from __future__ import annotations -from typing import List, Iterable +from typing import Iterable import httpx from ...types import knowledge_base_list_params, knowledge_base_create_params, knowledge_base_update_params -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ..._utils import maybe_transform, async_maybe_transform from ..._compat import cached_property from ..._resource import SyncAPIResource, AsyncAPIResource @@ -80,7 +80,7 @@ def create( name: str | NotGiven = NOT_GIVEN, project_id: str | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - tags: List[str] | NotGiven = NOT_GIVEN, + tags: SequenceNotStr[str] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
# The extra values given here take precedence over values defined on the client or passed to this method. @@ -189,7 +189,7 @@ def update( embedding_model_uuid: str | NotGiven = NOT_GIVEN, name: str | NotGiven = NOT_GIVEN, project_id: str | NotGiven = NOT_GIVEN, - tags: List[str] | NotGiven = NOT_GIVEN, + tags: SequenceNotStr[str] | NotGiven = NOT_GIVEN, body_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -368,7 +368,7 @@ async def create( name: str | NotGiven = NOT_GIVEN, project_id: str | NotGiven = NOT_GIVEN, region: str | NotGiven = NOT_GIVEN, - tags: List[str] | NotGiven = NOT_GIVEN, + tags: SequenceNotStr[str] | NotGiven = NOT_GIVEN, vpc_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. @@ -477,7 +477,7 @@ async def update( embedding_model_uuid: str | NotGiven = NOT_GIVEN, name: str | NotGiven = NOT_GIVEN, project_id: str | NotGiven = NOT_GIVEN, - tags: List[str] | NotGiven = NOT_GIVEN, + tags: SequenceNotStr[str] | NotGiven = NOT_GIVEN, body_uuid: str | NotGiven = NOT_GIVEN, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. diff --git a/src/gradient/types/agent_create_params.py b/src/gradient/types/agent_create_params.py index 68ebd227..db84a258 100644 --- a/src/gradient/types/agent_create_params.py +++ b/src/gradient/types/agent_create_params.py @@ -2,9 +2,9 @@ from __future__ import annotations -from typing import List from typing_extensions import Annotated, TypedDict +from .._types import SequenceNotStr from .._utils import PropertyInfo __all__ = ["AgentCreateParams"] @@ -25,7 +25,7 @@ class AgentCreateParams(TypedDict, total=False): for best practices. 
""" - knowledge_base_uuid: List[str] + knowledge_base_uuid: SequenceNotStr[str] """Ids of the knowledge base(s) to attach to the agent""" model_uuid: str @@ -43,5 +43,5 @@ class AgentCreateParams(TypedDict, total=False): region: str """The DigitalOcean region to deploy your agent in""" - tags: List[str] + tags: SequenceNotStr[str] """Agent tag to organize related resources""" diff --git a/src/gradient/types/agent_update_params.py b/src/gradient/types/agent_update_params.py index c26bf833..75c30cba 100644 --- a/src/gradient/types/agent_update_params.py +++ b/src/gradient/types/agent_update_params.py @@ -2,9 +2,9 @@ from __future__ import annotations -from typing import List from typing_extensions import Annotated, TypedDict +from .._types import SequenceNotStr from .._utils import PropertyInfo from .api_retrieval_method import APIRetrievalMethod @@ -64,7 +64,7 @@ class AgentUpdateParams(TypedDict, total=False): - RETRIEVAL_METHOD_NONE: The retrieval method is none """ - tags: List[str] + tags: SequenceNotStr[str] """A set of abitrary tags to organize your agent""" temperature: float diff --git a/src/gradient/types/agents/chat/completion_create_params.py b/src/gradient/types/agents/chat/completion_create_params.py index aaec2ba5..d8cf7bc1 100644 --- a/src/gradient/types/agents/chat/completion_create_params.py +++ b/src/gradient/types/agents/chat/completion_create_params.py @@ -2,9 +2,11 @@ from __future__ import annotations -from typing import Dict, List, Union, Iterable, Optional +from typing import Dict, Union, Iterable, Optional from typing_extensions import Literal, Required, TypeAlias, TypedDict +from ...._types import SequenceNotStr + __all__ = [ "CompletionCreateParamsBase", "Message", @@ -96,7 +98,7 @@ class CompletionCreateParamsBase(TypedDict, total=False): far, increasing the model's likelihood to talk about new topics. """ - stop: Union[Optional[str], List[str], None] + stop: Union[Optional[str], SequenceNotStr[str], None] """Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence. 
@@ -156,7 +158,7 @@ class CompletionCreateParamsBase(TypedDict, total=False): class MessageChatCompletionRequestSystemMessage(TypedDict, total=False): - content: Required[Union[str, List[str]]] + content: Required[Union[str, SequenceNotStr[str]]] """The contents of the system message.""" role: Required[Literal["system"]] @@ -164,7 +166,7 @@ class MessageChatCompletionRequestSystemMessage(TypedDict, total=False): class MessageChatCompletionRequestDeveloperMessage(TypedDict, total=False): - content: Required[Union[str, List[str]]] + content: Required[Union[str, SequenceNotStr[str]]] """The contents of the developer message.""" role: Required[Literal["developer"]] @@ -172,7 +174,7 @@ class MessageChatCompletionRequestDeveloperMessage(TypedDict, total=False): class MessageChatCompletionRequestUserMessage(TypedDict, total=False): - content: Required[Union[str, List[str]]] + content: Required[Union[str, SequenceNotStr[str]]] """The contents of the user message.""" role: Required[Literal["user"]] @@ -207,7 +209,7 @@ class MessageChatCompletionRequestAssistantMessage(TypedDict, total=False): role: Required[Literal["assistant"]] """The role of the messages author, in this case `assistant`.""" - content: Union[str, List[str], None] + content: Union[str, SequenceNotStr[str], None] """The contents of the assistant message.""" tool_calls: Iterable[MessageChatCompletionRequestAssistantMessageToolCall] diff --git a/src/gradient/types/agents/evaluation_metrics/workspace_create_params.py b/src/gradient/types/agents/evaluation_metrics/workspace_create_params.py index 7a418e81..443a6f43 100644 --- a/src/gradient/types/agents/evaluation_metrics/workspace_create_params.py +++ b/src/gradient/types/agents/evaluation_metrics/workspace_create_params.py @@ -2,14 +2,15 @@ from __future__ import annotations -from typing import List from typing_extensions import TypedDict +from ...._types import SequenceNotStr + __all__ = ["WorkspaceCreateParams"] class WorkspaceCreateParams(TypedDict, total=False): - agent_uuids: List[str] + agent_uuids: SequenceNotStr[str] """Ids of the agents(s) to attach to the workspace""" description: str diff --git a/src/gradient/types/agents/evaluation_metrics/workspaces/agent_move_params.py b/src/gradient/types/agents/evaluation_metrics/workspaces/agent_move_params.py index 74e27dd2..7b451084 100644 --- a/src/gradient/types/agents/evaluation_metrics/workspaces/agent_move_params.py +++ b/src/gradient/types/agents/evaluation_metrics/workspaces/agent_move_params.py @@ -2,16 +2,16 @@ from __future__ import annotations -from typing import List from typing_extensions import Annotated, TypedDict +from ....._types import SequenceNotStr from ....._utils import PropertyInfo __all__ = ["AgentMoveParams"] class AgentMoveParams(TypedDict, total=False): - agent_uuids: List[str] + agent_uuids: SequenceNotStr[str] """Agent uuids""" body_workspace_uuid: Annotated[str, PropertyInfo(alias="workspace_uuid")] diff --git a/src/gradient/types/agents/evaluation_run_create_params.py b/src/gradient/types/agents/evaluation_run_create_params.py index 3029e192..52bbee85 100644 --- a/src/gradient/types/agents/evaluation_run_create_params.py +++ b/src/gradient/types/agents/evaluation_run_create_params.py @@ -2,14 +2,15 @@ from __future__ import annotations -from typing import List from typing_extensions import TypedDict +from ..._types import SequenceNotStr + __all__ = ["EvaluationRunCreateParams"] class EvaluationRunCreateParams(TypedDict, total=False): - agent_uuids: List[str] + agent_uuids: SequenceNotStr[str] """Agent 
UUIDs to run the test case against.""" run_name: str diff --git a/src/gradient/types/agents/evaluation_test_case_create_params.py b/src/gradient/types/agents/evaluation_test_case_create_params.py index 51ce20c7..af49d024 100644 --- a/src/gradient/types/agents/evaluation_test_case_create_params.py +++ b/src/gradient/types/agents/evaluation_test_case_create_params.py @@ -2,9 +2,9 @@ from __future__ import annotations -from typing import List from typing_extensions import TypedDict +from ..._types import SequenceNotStr from .api_star_metric_param import APIStarMetricParam __all__ = ["EvaluationTestCaseCreateParams"] @@ -17,7 +17,7 @@ class EvaluationTestCaseCreateParams(TypedDict, total=False): description: str """Description of the test case.""" - metrics: List[str] + metrics: SequenceNotStr[str] """Full metric list to use for evaluation test case.""" name: str diff --git a/src/gradient/types/agents/evaluation_test_case_update_params.py b/src/gradient/types/agents/evaluation_test_case_update_params.py index 825f961b..d707d909 100644 --- a/src/gradient/types/agents/evaluation_test_case_update_params.py +++ b/src/gradient/types/agents/evaluation_test_case_update_params.py @@ -2,9 +2,9 @@ from __future__ import annotations -from typing import List from typing_extensions import Annotated, TypedDict +from ..._types import SequenceNotStr from ..._utils import PropertyInfo from .api_star_metric_param import APIStarMetricParam @@ -30,4 +30,4 @@ class EvaluationTestCaseUpdateParams(TypedDict, total=False): class Metrics(TypedDict, total=False): - metric_uuids: List[str] + metric_uuids: SequenceNotStr[str] diff --git a/src/gradient/types/chat/completion_create_params.py b/src/gradient/types/chat/completion_create_params.py index aaec2ba5..17f00242 100644 --- a/src/gradient/types/chat/completion_create_params.py +++ b/src/gradient/types/chat/completion_create_params.py @@ -2,9 +2,11 @@ from __future__ import annotations -from typing import Dict, List, Union, Iterable, Optional +from typing import Dict, Union, Iterable, Optional from typing_extensions import Literal, Required, TypeAlias, TypedDict +from ..._types import SequenceNotStr + __all__ = [ "CompletionCreateParamsBase", "Message", @@ -96,7 +98,7 @@ class CompletionCreateParamsBase(TypedDict, total=False): far, increasing the model's likelihood to talk about new topics. """ - stop: Union[Optional[str], List[str], None] + stop: Union[Optional[str], SequenceNotStr[str], None] """Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence. 
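The widened stop and message content annotations above mean chat completion params can now be built from any non-string sequence of strings, not only a list. A minimal sketch of what the new annotations are meant to permit, using the TypedDicts defined in this module (the message text, tuple values, and variable names are illustrative only):

from gradient.types.chat.completion_create_params import (
    MessageChatCompletionRequestSystemMessage,
    MessageChatCompletionRequestUserMessage,
)

# content may be a single string or any non-string sequence of strings,
# so a tuple can be used without first converting it to a list.
system_message: MessageChatCompletionRequestSystemMessage = {
    "role": "system",
    "content": ("You are a concise assistant.", "Answer in one sentence."),
}

user_message: MessageChatCompletionRequestUserMessage = {
    "role": "user",
    "content": "What does SequenceNotStr change for callers?",
}

# stop likewise accepts None, a single string, or a non-string sequence of strings.
stop_sequences = ("\n\n", "END")

The same widening is applied to the agent-scoped copy of this module, types/agents/chat/completion_create_params.py, so both call paths accept the same shapes.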
@@ -156,7 +158,7 @@ class CompletionCreateParamsBase(TypedDict, total=False): class MessageChatCompletionRequestSystemMessage(TypedDict, total=False): - content: Required[Union[str, List[str]]] + content: Required[Union[str, SequenceNotStr[str]]] """The contents of the system message.""" role: Required[Literal["system"]] @@ -164,7 +166,7 @@ class MessageChatCompletionRequestSystemMessage(TypedDict, total=False): class MessageChatCompletionRequestDeveloperMessage(TypedDict, total=False): - content: Required[Union[str, List[str]]] + content: Required[Union[str, SequenceNotStr[str]]] """The contents of the developer message.""" role: Required[Literal["developer"]] @@ -172,7 +174,7 @@ class MessageChatCompletionRequestDeveloperMessage(TypedDict, total=False): class MessageChatCompletionRequestUserMessage(TypedDict, total=False): - content: Required[Union[str, List[str]]] + content: Required[Union[str, SequenceNotStr[str]]] """The contents of the user message.""" role: Required[Literal["user"]] @@ -207,7 +209,7 @@ class MessageChatCompletionRequestAssistantMessage(TypedDict, total=False): role: Required[Literal["assistant"]] """The role of the messages author, in this case `assistant`.""" - content: Union[str, List[str], None] + content: Union[str, SequenceNotStr[str], None] """The contents of the assistant message.""" tool_calls: Iterable[MessageChatCompletionRequestAssistantMessageToolCall] diff --git a/src/gradient/types/gpu_droplet_create_params.py b/src/gradient/types/gpu_droplet_create_params.py index f38661fb..96403479 100644 --- a/src/gradient/types/gpu_droplet_create_params.py +++ b/src/gradient/types/gpu_droplet_create_params.py @@ -2,9 +2,10 @@ from __future__ import annotations -from typing import List, Union, Optional +from typing import Union, Optional from typing_extensions import Required, TypeAlias, TypedDict +from .._types import SequenceNotStr from .droplet_backup_policy_param import DropletBackupPolicyParam __all__ = ["GPUDropletCreateParams", "DropletSingleCreate", "DropletMultiCreate"] @@ -65,14 +66,14 @@ class DropletSingleCreate(TypedDict, total=False): the Droplet may deploy in any region. """ - ssh_keys: List[Union[str, int]] + ssh_keys: SequenceNotStr[Union[str, int]] """ An array containing the IDs or fingerprints of the SSH keys that you wish to embed in the Droplet's root account upon creation. You must add the keys to your team before they can be embedded on a Droplet. Requires `ssh_key:read` scope. """ - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to apply to the Droplet after it is created. @@ -86,7 +87,7 @@ class DropletSingleCreate(TypedDict, total=False): and may not exceed 64 KiB in size. """ - volumes: List[str] + volumes: SequenceNotStr[str] """ An array of IDs for block storage volumes that will be attached to the Droplet once created. The volumes must not already be attached to an existing Droplet. @@ -118,7 +119,7 @@ class DropletMultiCreate(TypedDict, total=False): scope. """ - names: Required[List[str]] + names: Required[SequenceNotStr[str]] """ An array of human human-readable strings you wish to use when displaying the Droplet name. Each name, if set to a domain name managed in the DigitalOcean DNS @@ -165,14 +166,14 @@ class DropletMultiCreate(TypedDict, total=False): the Droplet may deploy in any region. 
""" - ssh_keys: List[Union[str, int]] + ssh_keys: SequenceNotStr[Union[str, int]] """ An array containing the IDs or fingerprints of the SSH keys that you wish to embed in the Droplet's root account upon creation. You must add the keys to your team before they can be embedded on a Droplet. Requires `ssh_key:read` scope. """ - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to apply to the Droplet after it is created. @@ -186,7 +187,7 @@ class DropletMultiCreate(TypedDict, total=False): and may not exceed 64 KiB in size. """ - volumes: List[str] + volumes: SequenceNotStr[str] """ An array of IDs for block storage volumes that will be attached to the Droplet once created. The volumes must not already be attached to an existing Droplet. diff --git a/src/gradient/types/gpu_droplets/autoscale_pool_droplet_template_param.py b/src/gradient/types/gpu_droplets/autoscale_pool_droplet_template_param.py index c491ed55..3eb8ac89 100644 --- a/src/gradient/types/gpu_droplets/autoscale_pool_droplet_template_param.py +++ b/src/gradient/types/gpu_droplets/autoscale_pool_droplet_template_param.py @@ -2,9 +2,10 @@ from __future__ import annotations -from typing import List from typing_extensions import Literal, Required, TypedDict +from ..._types import SequenceNotStr + __all__ = ["AutoscalePoolDropletTemplateParam"] @@ -38,7 +39,7 @@ class AutoscalePoolDropletTemplateParam(TypedDict, total=False): size: Required[str] """The Droplet size to be used for all Droplets in the autoscale pool.""" - ssh_keys: Required[List[str]] + ssh_keys: Required[SequenceNotStr[str]] """The SSH keys to be installed on the Droplets in the autoscale pool. You can either specify the key ID or the fingerprint. Requires `ssh_key:read` @@ -57,7 +58,7 @@ class AutoscalePoolDropletTemplateParam(TypedDict, total=False): `project:read` scope. """ - tags: List[str] + tags: SequenceNotStr[str] """ The tags to apply to each of the Droplets in the autoscale pool. Requires `tag:read` scope. diff --git a/src/gradient/types/gpu_droplets/destroy_with_associated_resource_delete_selective_params.py b/src/gradient/types/gpu_droplets/destroy_with_associated_resource_delete_selective_params.py index f4037b6b..9a9730e7 100644 --- a/src/gradient/types/gpu_droplets/destroy_with_associated_resource_delete_selective_params.py +++ b/src/gradient/types/gpu_droplets/destroy_with_associated_resource_delete_selective_params.py @@ -2,33 +2,34 @@ from __future__ import annotations -from typing import List from typing_extensions import TypedDict +from ..._types import SequenceNotStr + __all__ = ["DestroyWithAssociatedResourceDeleteSelectiveParams"] class DestroyWithAssociatedResourceDeleteSelectiveParams(TypedDict, total=False): - floating_ips: List[str] + floating_ips: SequenceNotStr[str] """ An array of unique identifiers for the floating IPs to be scheduled for deletion. """ - reserved_ips: List[str] + reserved_ips: SequenceNotStr[str] """ An array of unique identifiers for the reserved IPs to be scheduled for deletion. """ - snapshots: List[str] + snapshots: SequenceNotStr[str] """An array of unique identifiers for the snapshots to be scheduled for deletion.""" - volume_snapshots: List[str] + volume_snapshots: SequenceNotStr[str] """ An array of unique identifiers for the volume snapshots to be scheduled for deletion. 
""" - volumes: List[str] + volumes: SequenceNotStr[str] """An array of unique identifiers for the volumes to be scheduled for deletion.""" diff --git a/src/gradient/types/gpu_droplets/firewall_param.py b/src/gradient/types/gpu_droplets/firewall_param.py index 1be9cf6a..8b5a5a15 100644 --- a/src/gradient/types/gpu_droplets/firewall_param.py +++ b/src/gradient/types/gpu_droplets/firewall_param.py @@ -2,9 +2,10 @@ from __future__ import annotations -from typing import List, Iterable, Optional +from typing import Iterable, Optional from typing_extensions import Literal, Required, TypedDict +from ..._types import SequenceNotStr from ..shared_params.firewall_rule_target import FirewallRuleTarget __all__ = ["FirewallParam", "InboundRule", "OutboundRule"] @@ -58,7 +59,7 @@ class FirewallParam(TypedDict, total=False): outbound_rules: Optional[Iterable[OutboundRule]] - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to be applied to the resource. Tag names must exist in order to be referenced in a request. diff --git a/src/gradient/types/gpu_droplets/firewalls/tag_add_params.py b/src/gradient/types/gpu_droplets/firewalls/tag_add_params.py index 63af7640..c3b9696e 100644 --- a/src/gradient/types/gpu_droplets/firewalls/tag_add_params.py +++ b/src/gradient/types/gpu_droplets/firewalls/tag_add_params.py @@ -2,14 +2,16 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional from typing_extensions import Required, TypedDict +from ...._types import SequenceNotStr + __all__ = ["TagAddParams"] class TagAddParams(TypedDict, total=False): - tags: Required[Optional[List[str]]] + tags: Required[Optional[SequenceNotStr[str]]] """A flat array of tag names as strings to be applied to the resource. Tag names must exist in order to be referenced in a request. diff --git a/src/gradient/types/gpu_droplets/firewalls/tag_remove_params.py b/src/gradient/types/gpu_droplets/firewalls/tag_remove_params.py index 91a3e382..bdd848f3 100644 --- a/src/gradient/types/gpu_droplets/firewalls/tag_remove_params.py +++ b/src/gradient/types/gpu_droplets/firewalls/tag_remove_params.py @@ -2,14 +2,16 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional from typing_extensions import Required, TypedDict +from ...._types import SequenceNotStr + __all__ = ["TagRemoveParams"] class TagRemoveParams(TypedDict, total=False): - tags: Required[Optional[List[str]]] + tags: Required[Optional[SequenceNotStr[str]]] """A flat array of tag names as strings to be applied to the resource. Tag names must exist in order to be referenced in a request. diff --git a/src/gradient/types/gpu_droplets/image_create_params.py b/src/gradient/types/gpu_droplets/image_create_params.py index efbd684c..baae3bf5 100644 --- a/src/gradient/types/gpu_droplets/image_create_params.py +++ b/src/gradient/types/gpu_droplets/image_create_params.py @@ -2,9 +2,11 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional from typing_extensions import Literal, TypedDict +from ..._types import SequenceNotStr + __all__ = ["ImageCreateParams"] @@ -64,7 +66,7 @@ class ImageCreateParams(TypedDict, total=False): available. """ - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to be applied to the resource. Tag names may be for either existing or new tags. 
diff --git a/src/gradient/types/gpu_droplets/lb_firewall_param.py b/src/gradient/types/gpu_droplets/lb_firewall_param.py index 6f1dcf10..7d54a048 100644 --- a/src/gradient/types/gpu_droplets/lb_firewall_param.py +++ b/src/gradient/types/gpu_droplets/lb_firewall_param.py @@ -2,20 +2,21 @@ from __future__ import annotations -from typing import List from typing_extensions import TypedDict +from ..._types import SequenceNotStr + __all__ = ["LbFirewallParam"] class LbFirewallParam(TypedDict, total=False): - allow: List[str] + allow: SequenceNotStr[str] """ the rules for allowing traffic to the load balancer (in the form 'ip:1.2.3.4' or 'cidr:1.2.0.0/16') """ - deny: List[str] + deny: SequenceNotStr[str] """ the rules for denying traffic to the load balancer (in the form 'ip:1.2.3.4' or 'cidr:1.2.0.0/16') diff --git a/src/gradient/types/gpu_droplets/load_balancer_create_params.py b/src/gradient/types/gpu_droplets/load_balancer_create_params.py index a87d9148..06472c78 100644 --- a/src/gradient/types/gpu_droplets/load_balancer_create_params.py +++ b/src/gradient/types/gpu_droplets/load_balancer_create_params.py @@ -2,9 +2,10 @@ from __future__ import annotations -from typing import List, Union, Iterable +from typing import Union, Iterable from typing_extensions import Literal, Required, TypeAlias, TypedDict +from ..._types import SequenceNotStr from .domains_param import DomainsParam from .lb_firewall_param import LbFirewallParam from .glb_settings_param import GlbSettingsParam @@ -148,7 +149,7 @@ class AssignDropletsByID(TypedDict, total=False): sticky_sessions: StickySessionsParam """An object specifying sticky sessions settings for the load balancer.""" - target_load_balancer_ids: List[str] + target_load_balancer_ids: SequenceNotStr[str] """ An array containing the UUIDs of the Regional load balancers to be used as target backends for a Global load balancer. @@ -308,7 +309,7 @@ class AssignDropletsByTag(TypedDict, total=False): balancer. """ - target_load_balancer_ids: List[str] + target_load_balancer_ids: SequenceNotStr[str] """ An array containing the UUIDs of the Regional load balancers to be used as target backends for a Global load balancer. diff --git a/src/gradient/types/gpu_droplets/load_balancer_update_params.py b/src/gradient/types/gpu_droplets/load_balancer_update_params.py index 9a1906cb..01c2bda5 100644 --- a/src/gradient/types/gpu_droplets/load_balancer_update_params.py +++ b/src/gradient/types/gpu_droplets/load_balancer_update_params.py @@ -2,9 +2,10 @@ from __future__ import annotations -from typing import List, Union, Iterable +from typing import Union, Iterable from typing_extensions import Literal, Required, TypeAlias, TypedDict +from ..._types import SequenceNotStr from .domains_param import DomainsParam from .lb_firewall_param import LbFirewallParam from .glb_settings_param import GlbSettingsParam @@ -148,7 +149,7 @@ class AssignDropletsByID(TypedDict, total=False): sticky_sessions: StickySessionsParam """An object specifying sticky sessions settings for the load balancer.""" - target_load_balancer_ids: List[str] + target_load_balancer_ids: SequenceNotStr[str] """ An array containing the UUIDs of the Regional load balancers to be used as target backends for a Global load balancer. @@ -308,7 +309,7 @@ class AssignDropletsByTag(TypedDict, total=False): balancer. """ - target_load_balancer_ids: List[str] + target_load_balancer_ids: SequenceNotStr[str] """ An array containing the UUIDs of the Regional load balancers to be used as target backends for a Global load balancer. 
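The allow and deny params above (and the various tags fields) get the same treatment throughout this patch. A small sketch, assuming the TypedDict is constructed directly (calling a TypedDict simply returns a plain dict); the example rule values follow the 'ip:...' / 'cidr:...' format described in the docstrings and are illustrative only:

from gradient.types.gpu_droplets.lb_firewall_param import LbFirewallParam

# Rules gathered as tuples no longer need to be copied into lists
# before being used as request params.
firewall = LbFirewallParam(
    allow=("cidr:10.0.0.0/8", "ip:192.0.2.10"),
    deny=("ip:203.0.113.7",),
)
assert firewall == {"allow": ("cidr:10.0.0.0/8", "ip:192.0.2.10"), "deny": ("ip:203.0.113.7",)}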
diff --git a/src/gradient/types/gpu_droplets/volume_create_params.py b/src/gradient/types/gpu_droplets/volume_create_params.py index fc889801..c58f7f9d 100644 --- a/src/gradient/types/gpu_droplets/volume_create_params.py +++ b/src/gradient/types/gpu_droplets/volume_create_params.py @@ -2,9 +2,11 @@ from __future__ import annotations -from typing import List, Union, Optional +from typing import Union, Optional from typing_extensions import Literal, Required, TypeAlias, TypedDict +from ..._types import SequenceNotStr + __all__ = ["VolumeCreateParams", "VolumesExt4", "VolumesXfs"] @@ -70,7 +72,7 @@ class VolumesExt4(TypedDict, total=False): snapshot_id: str """The unique identifier for the volume snapshot from which to create the volume.""" - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to be applied to the resource. Tag names may be for either existing or new tags. @@ -141,7 +143,7 @@ class VolumesXfs(TypedDict, total=False): snapshot_id: str """The unique identifier for the volume snapshot from which to create the volume.""" - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to be applied to the resource. Tag names may be for either existing or new tags. diff --git a/src/gradient/types/gpu_droplets/volumes/action_initiate_by_id_params.py b/src/gradient/types/gpu_droplets/volumes/action_initiate_by_id_params.py index 6d41d463..bf1869af 100644 --- a/src/gradient/types/gpu_droplets/volumes/action_initiate_by_id_params.py +++ b/src/gradient/types/gpu_droplets/volumes/action_initiate_by_id_params.py @@ -2,9 +2,11 @@ from __future__ import annotations -from typing import List, Union, Optional +from typing import Union, Optional from typing_extensions import Literal, Required, TypeAlias, TypedDict +from ...._types import SequenceNotStr + __all__ = ["ActionInitiateByIDParams", "VolumeActionPostAttach", "VolumeActionPostDetach", "VolumeActionPostResize"] @@ -46,7 +48,7 @@ class VolumeActionPostAttach(TypedDict, total=False): available. """ - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to be applied to the resource. Tag names may be for either existing or new tags. diff --git a/src/gradient/types/gpu_droplets/volumes/action_initiate_by_name_params.py b/src/gradient/types/gpu_droplets/volumes/action_initiate_by_name_params.py index d1a7d084..f37d6d9a 100644 --- a/src/gradient/types/gpu_droplets/volumes/action_initiate_by_name_params.py +++ b/src/gradient/types/gpu_droplets/volumes/action_initiate_by_name_params.py @@ -2,9 +2,11 @@ from __future__ import annotations -from typing import List, Union, Optional +from typing import Union, Optional from typing_extensions import Literal, Required, TypeAlias, TypedDict +from ...._types import SequenceNotStr + __all__ = ["ActionInitiateByNameParams", "VolumeActionPostAttach", "VolumeActionPostDetach"] @@ -46,7 +48,7 @@ class VolumeActionPostAttach(TypedDict, total=False): available. """ - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to be applied to the resource. Tag names may be for either existing or new tags. 
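The reason for SequenceNotStr rather than a plain Sequence[str] is that str is itself a Sequence[str]: a bare string would type-check and then be iterated character by character. A sketch of the intended behaviour, using a hypothetical normalize_tags helper (SequenceNotStr lives in the private gradient._types module and its definition is not part of this patch):

from typing import List

from gradient._types import SequenceNotStr

def normalize_tags(tags: SequenceNotStr[str]) -> List[str]:
    # Accepts a list, tuple, or any other non-string sequence of strings.
    return [tag.strip().lower() for tag in tags]

normalize_tags(["Web", "Prod"])   # accepted
normalize_tags(("Web", "Prod"))   # accepted: tuples need no conversion to a list
normalize_tags("Web")             # flagged by type checkers; at runtime a bare str
                                  # would be split into single characters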
diff --git a/src/gradient/types/gpu_droplets/volumes/snapshot_create_params.py b/src/gradient/types/gpu_droplets/volumes/snapshot_create_params.py index 8cce4a59..890dd302 100644 --- a/src/gradient/types/gpu_droplets/volumes/snapshot_create_params.py +++ b/src/gradient/types/gpu_droplets/volumes/snapshot_create_params.py @@ -2,9 +2,11 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional from typing_extensions import Required, TypedDict +from ...._types import SequenceNotStr + __all__ = ["SnapshotCreateParams"] @@ -12,7 +14,7 @@ class SnapshotCreateParams(TypedDict, total=False): name: Required[str] """A human-readable name for the volume snapshot.""" - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to be applied to the resource. Tag names may be for either existing or new tags. diff --git a/src/gradient/types/knowledge_base_create_params.py b/src/gradient/types/knowledge_base_create_params.py index 5c0df9a6..e40bd598 100644 --- a/src/gradient/types/knowledge_base_create_params.py +++ b/src/gradient/types/knowledge_base_create_params.py @@ -2,9 +2,10 @@ from __future__ import annotations -from typing import List, Iterable +from typing import Iterable from typing_extensions import TypedDict +from .._types import SequenceNotStr from .knowledge_bases.aws_data_source_param import AwsDataSourceParam from .knowledge_bases.api_spaces_data_source_param import APISpacesDataSourceParam from .knowledge_bases.api_file_upload_data_source_param import APIFileUploadDataSourceParam @@ -44,7 +45,7 @@ class KnowledgeBaseCreateParams(TypedDict, total=False): region: str """The datacenter region to deploy the knowledge base in.""" - tags: List[str] + tags: SequenceNotStr[str] """Tags to organize your knowledge base.""" vpc_uuid: str diff --git a/src/gradient/types/knowledge_base_update_params.py b/src/gradient/types/knowledge_base_update_params.py index 7a86b40c..cfb52016 100644 --- a/src/gradient/types/knowledge_base_update_params.py +++ b/src/gradient/types/knowledge_base_update_params.py @@ -2,9 +2,9 @@ from __future__ import annotations -from typing import List from typing_extensions import Annotated, TypedDict +from .._types import SequenceNotStr from .._utils import PropertyInfo __all__ = ["KnowledgeBaseUpdateParams"] @@ -23,7 +23,7 @@ class KnowledgeBaseUpdateParams(TypedDict, total=False): project_id: str """The id of the DigitalOcean project this knowledge base will belong to""" - tags: List[str] + tags: SequenceNotStr[str] """Tags to organize your knowledge base.""" body_uuid: Annotated[str, PropertyInfo(alias="uuid")] diff --git a/src/gradient/types/knowledge_bases/indexing_job_create_params.py b/src/gradient/types/knowledge_bases/indexing_job_create_params.py index d92c5790..ebd8632b 100644 --- a/src/gradient/types/knowledge_bases/indexing_job_create_params.py +++ b/src/gradient/types/knowledge_bases/indexing_job_create_params.py @@ -2,14 +2,15 @@ from __future__ import annotations -from typing import List from typing_extensions import TypedDict +from ..._types import SequenceNotStr + __all__ = ["IndexingJobCreateParams"] class IndexingJobCreateParams(TypedDict, total=False): - data_source_uuids: List[str] + data_source_uuids: SequenceNotStr[str] """ List of data source ids to index, if none are provided, all data sources will be indexed diff --git a/src/gradient/types/shared_params/firewall_rule_target.py b/src/gradient/types/shared_params/firewall_rule_target.py index 49a5f75c..7f317f6c 
100644 --- a/src/gradient/types/shared_params/firewall_rule_target.py +++ b/src/gradient/types/shared_params/firewall_rule_target.py @@ -2,14 +2,16 @@ from __future__ import annotations -from typing import List, Iterable, Optional +from typing import Iterable, Optional from typing_extensions import TypedDict +from ..._types import SequenceNotStr + __all__ = ["FirewallRuleTarget"] class FirewallRuleTarget(TypedDict, total=False): - addresses: List[str] + addresses: SequenceNotStr[str] """ An array of strings containing the IPv4 addresses, IPv6 addresses, IPv4 CIDRs, and/or IPv6 CIDRs to which the firewall will allow traffic. @@ -21,19 +23,19 @@ class FirewallRuleTarget(TypedDict, total=False): traffic. """ - kubernetes_ids: List[str] + kubernetes_ids: SequenceNotStr[str] """ An array containing the IDs of the Kubernetes clusters to which the firewall will allow traffic. """ - load_balancer_uids: List[str] + load_balancer_uids: SequenceNotStr[str] """ An array containing the IDs of the load balancers to which the firewall will allow traffic. """ - tags: Optional[List[str]] + tags: Optional[SequenceNotStr[str]] """A flat array of tag names as strings to be applied to the resource. Tag names must exist in order to be referenced in a request. From 300eac0417f8f17a65bb871b15de1254f4677558 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 4 Sep 2025 03:55:25 +0000 Subject: [PATCH 03/11] feat: improve future compat with pydantic v3 --- src/gradient/_base_client.py | 6 +- src/gradient/_compat.py | 96 ++++++++--------- src/gradient/_models.py | 80 +++++++------- src/gradient/_utils/__init__.py | 10 +- src/gradient/_utils/_compat.py | 45 ++++++++ src/gradient/_utils/_datetime_parse.py | 136 ++++++++++++++++++++++++ src/gradient/_utils/_transform.py | 6 +- src/gradient/_utils/_typing.py | 2 +- src/gradient/_utils/_utils.py | 1 - src/gradient/types/__init__.py | 50 ++++----- tests/test_models.py | 48 ++++----- tests/test_transform.py | 16 +-- tests/test_utils/test_datetime_parse.py | 110 +++++++++++++++++++ tests/utils.py | 8 +- 14 files changed, 457 insertions(+), 157 deletions(-) create mode 100644 src/gradient/_utils/_compat.py create mode 100644 src/gradient/_utils/_datetime_parse.py create mode 100644 tests/test_utils/test_datetime_parse.py diff --git a/src/gradient/_base_client.py b/src/gradient/_base_client.py index 6f2db396..a5a9ad8c 100644 --- a/src/gradient/_base_client.py +++ b/src/gradient/_base_client.py @@ -59,7 +59,7 @@ ModelBuilderProtocol, ) from ._utils import is_dict, is_list, asyncify, is_given, lru_cache, is_mapping -from ._compat import PYDANTIC_V2, model_copy, model_dump +from ._compat import PYDANTIC_V1, model_copy, model_dump from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type from ._response import ( APIResponse, @@ -232,7 +232,7 @@ def _set_private_attributes( model: Type[_T], options: FinalRequestOptions, ) -> None: - if PYDANTIC_V2 and getattr(self, "__pydantic_private__", None) is None: + if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None: self.__pydantic_private__ = {} self._model = model @@ -320,7 +320,7 @@ def _set_private_attributes( client: AsyncAPIClient, options: FinalRequestOptions, ) -> None: - if PYDANTIC_V2 and getattr(self, "__pydantic_private__", None) is None: + if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None: self.__pydantic_private__ = {} self._model = model diff --git a/src/gradient/_compat.py 
b/src/gradient/_compat.py index 92d9ee61..bdef67f0 100644 --- a/src/gradient/_compat.py +++ b/src/gradient/_compat.py @@ -12,14 +12,13 @@ _T = TypeVar("_T") _ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel) -# --------------- Pydantic v2 compatibility --------------- +# --------------- Pydantic v2, v3 compatibility --------------- # Pyright incorrectly reports some of our functions as overriding a method when they don't # pyright: reportIncompatibleMethodOverride=false -PYDANTIC_V2 = pydantic.VERSION.startswith("2.") +PYDANTIC_V1 = pydantic.VERSION.startswith("1.") -# v1 re-exports if TYPE_CHECKING: def parse_date(value: date | StrBytesIntFloat) -> date: # noqa: ARG001 @@ -44,90 +43,92 @@ def is_typeddict(type_: type[Any]) -> bool: # noqa: ARG001 ... else: - if PYDANTIC_V2: - from pydantic.v1.typing import ( + # v1 re-exports + if PYDANTIC_V1: + from pydantic.typing import ( get_args as get_args, is_union as is_union, get_origin as get_origin, is_typeddict as is_typeddict, is_literal_type as is_literal_type, ) - from pydantic.v1.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime + from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime else: - from pydantic.typing import ( + from ._utils import ( get_args as get_args, is_union as is_union, get_origin as get_origin, + parse_date as parse_date, is_typeddict as is_typeddict, + parse_datetime as parse_datetime, is_literal_type as is_literal_type, ) - from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime # refactored config if TYPE_CHECKING: from pydantic import ConfigDict as ConfigDict else: - if PYDANTIC_V2: - from pydantic import ConfigDict - else: + if PYDANTIC_V1: # TODO: provide an error message here? 
ConfigDict = None + else: + from pydantic import ConfigDict as ConfigDict # renamed methods / properties def parse_obj(model: type[_ModelT], value: object) -> _ModelT: - if PYDANTIC_V2: - return model.model_validate(value) - else: + if PYDANTIC_V1: return cast(_ModelT, model.parse_obj(value)) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] + else: + return model.model_validate(value) def field_is_required(field: FieldInfo) -> bool: - if PYDANTIC_V2: - return field.is_required() - return field.required # type: ignore + if PYDANTIC_V1: + return field.required # type: ignore + return field.is_required() def field_get_default(field: FieldInfo) -> Any: value = field.get_default() - if PYDANTIC_V2: - from pydantic_core import PydanticUndefined - - if value == PydanticUndefined: - return None + if PYDANTIC_V1: return value + from pydantic_core import PydanticUndefined + + if value == PydanticUndefined: + return None return value def field_outer_type(field: FieldInfo) -> Any: - if PYDANTIC_V2: - return field.annotation - return field.outer_type_ # type: ignore + if PYDANTIC_V1: + return field.outer_type_ # type: ignore + return field.annotation def get_model_config(model: type[pydantic.BaseModel]) -> Any: - if PYDANTIC_V2: - return model.model_config - return model.__config__ # type: ignore + if PYDANTIC_V1: + return model.__config__ # type: ignore + return model.model_config def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]: - if PYDANTIC_V2: - return model.model_fields - return model.__fields__ # type: ignore + if PYDANTIC_V1: + return model.__fields__ # type: ignore + return model.model_fields def model_copy(model: _ModelT, *, deep: bool = False) -> _ModelT: - if PYDANTIC_V2: - return model.model_copy(deep=deep) - return model.copy(deep=deep) # type: ignore + if PYDANTIC_V1: + return model.copy(deep=deep) # type: ignore + return model.model_copy(deep=deep) def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str: - if PYDANTIC_V2: - return model.model_dump_json(indent=indent) - return model.json(indent=indent) # type: ignore + if PYDANTIC_V1: + return model.json(indent=indent) # type: ignore + return model.model_dump_json(indent=indent) def model_dump( @@ -139,14 +140,14 @@ def model_dump( warnings: bool = True, mode: Literal["json", "python"] = "python", ) -> dict[str, Any]: - if PYDANTIC_V2 or hasattr(model, "model_dump"): + if (not PYDANTIC_V1) or hasattr(model, "model_dump"): return model.model_dump( mode=mode, exclude=exclude, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, # warnings are not supported in Pydantic v1 - warnings=warnings if PYDANTIC_V2 else True, + warnings=True if PYDANTIC_V1 else warnings, ) return cast( "dict[str, Any]", @@ -159,9 +160,9 @@ def model_dump( def model_parse(model: type[_ModelT], data: Any) -> _ModelT: - if PYDANTIC_V2: - return model.model_validate(data) - return model.parse_obj(data) # pyright: ignore[reportDeprecated] + if PYDANTIC_V1: + return model.parse_obj(data) # pyright: ignore[reportDeprecated] + return model.model_validate(data) # generic models @@ -170,17 +171,16 @@ def model_parse(model: type[_ModelT], data: Any) -> _ModelT: class GenericModel(pydantic.BaseModel): ... else: - if PYDANTIC_V2: + if PYDANTIC_V1: + import pydantic.generics + + class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ... 
+ else: # there no longer needs to be a distinction in v2 but # we still have to create our own subclass to avoid # inconsistent MRO ordering errors class GenericModel(pydantic.BaseModel): ... - else: - import pydantic.generics - - class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ... - # cached properties if TYPE_CHECKING: diff --git a/src/gradient/_models.py b/src/gradient/_models.py index 92f7c10b..3a6017ef 100644 --- a/src/gradient/_models.py +++ b/src/gradient/_models.py @@ -50,7 +50,7 @@ strip_annotated_type, ) from ._compat import ( - PYDANTIC_V2, + PYDANTIC_V1, ConfigDict, GenericModel as BaseGenericModel, get_args, @@ -81,11 +81,7 @@ class _ConfigProtocol(Protocol): class BaseModel(pydantic.BaseModel): - if PYDANTIC_V2: - model_config: ClassVar[ConfigDict] = ConfigDict( - extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true")) - ) - else: + if PYDANTIC_V1: @property @override @@ -95,6 +91,10 @@ def model_fields_set(self) -> set[str]: class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] extra: Any = pydantic.Extra.allow # type: ignore + else: + model_config: ClassVar[ConfigDict] = ConfigDict( + extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true")) + ) def to_dict( self, @@ -215,25 +215,25 @@ def construct( # pyright: ignore[reportIncompatibleMethodOverride] if key not in model_fields: parsed = construct_type(value=value, type_=extra_field_type) if extra_field_type is not None else value - if PYDANTIC_V2: - _extra[key] = parsed - else: + if PYDANTIC_V1: _fields_set.add(key) fields_values[key] = parsed + else: + _extra[key] = parsed object.__setattr__(m, "__dict__", fields_values) - if PYDANTIC_V2: - # these properties are copied from Pydantic's `model_construct()` method - object.__setattr__(m, "__pydantic_private__", None) - object.__setattr__(m, "__pydantic_extra__", _extra) - object.__setattr__(m, "__pydantic_fields_set__", _fields_set) - else: + if PYDANTIC_V1: # init_private_attributes() does not exist in v2 m._init_private_attributes() # type: ignore # copied from Pydantic v1's `construct()` method object.__setattr__(m, "__fields_set__", _fields_set) + else: + # these properties are copied from Pydantic's `model_construct()` method + object.__setattr__(m, "__pydantic_private__", None) + object.__setattr__(m, "__pydantic_extra__", _extra) + object.__setattr__(m, "__pydantic_fields_set__", _fields_set) return m @@ -243,7 +243,7 @@ def construct( # pyright: ignore[reportIncompatibleMethodOverride] # although not in practice model_construct = construct - if not PYDANTIC_V2: + if PYDANTIC_V1: # we define aliases for some of the new pydantic v2 methods so # that we can just document these methods without having to specify # a specific pydantic version as some users may not know which @@ -363,10 +363,10 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object: if value is None: return field_get_default(field) - if PYDANTIC_V2: - type_ = field.annotation - else: + if PYDANTIC_V1: type_ = cast(type, field.outer_type_) # type: ignore + else: + type_ = field.annotation # type: ignore if type_ is None: raise RuntimeError(f"Unexpected field type is None for {key}") @@ -375,7 +375,7 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object: def _get_extra_fields_type(cls: type[pydantic.BaseModel]) -> type | None: - if not PYDANTIC_V2: + if PYDANTIC_V1: # TODO return None @@ -628,30 +628,30 @@ def _build_discriminated_union_meta(*, union: type, 
meta_annotations: tuple[Any, for variant in get_args(union): variant = strip_annotated_type(variant) if is_basemodel_type(variant): - if PYDANTIC_V2: - field = _extract_field_schema_pv2(variant, discriminator_field_name) - if not field: + if PYDANTIC_V1: + field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] + if not field_info: continue # Note: if one variant defines an alias then they all should - discriminator_alias = field.get("serialization_alias") - - field_schema = field["schema"] + discriminator_alias = field_info.alias - if field_schema["type"] == "literal": - for entry in cast("LiteralSchema", field_schema)["expected"]: + if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation): + for entry in get_args(annotation): if isinstance(entry, str): mapping[entry] = variant else: - field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] - if not field_info: + field = _extract_field_schema_pv2(variant, discriminator_field_name) + if not field: continue # Note: if one variant defines an alias then they all should - discriminator_alias = field_info.alias + discriminator_alias = field.get("serialization_alias") - if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation): - for entry in get_args(annotation): + field_schema = field["schema"] + + if field_schema["type"] == "literal": + for entry in cast("LiteralSchema", field_schema)["expected"]: if isinstance(entry, str): mapping[entry] = variant @@ -714,7 +714,7 @@ class GenericModel(BaseGenericModel, BaseModel): pass -if PYDANTIC_V2: +if not PYDANTIC_V1: from pydantic import TypeAdapter as _TypeAdapter _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter)) @@ -782,12 +782,12 @@ class FinalRequestOptions(pydantic.BaseModel): json_data: Union[Body, None] = None extra_json: Union[AnyMapping, None] = None - if PYDANTIC_V2: - model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True) - else: + if PYDANTIC_V1: class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] arbitrary_types_allowed: bool = True + else: + model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True) def get_max_retries(self, max_retries: int) -> int: if isinstance(self.max_retries, NotGiven): @@ -820,9 +820,9 @@ def construct( # type: ignore key: strip_not_given(value) for key, value in values.items() } - if PYDANTIC_V2: - return super().model_construct(_fields_set, **kwargs) - return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs)) # pyright: ignore[reportDeprecated] + if PYDANTIC_V1: + return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs)) # pyright: ignore[reportDeprecated] + return super().model_construct(_fields_set, **kwargs) if not TYPE_CHECKING: # type checkers incorrectly complain about this assignment diff --git a/src/gradient/_utils/__init__.py b/src/gradient/_utils/__init__.py index ca547ce5..dc64e29a 100644 --- a/src/gradient/_utils/__init__.py +++ b/src/gradient/_utils/__init__.py @@ -10,7 +10,6 @@ lru_cache as lru_cache, is_mapping as is_mapping, is_tuple_t as is_tuple_t, - parse_date as parse_date, is_iterable as is_iterable, is_sequence as is_sequence, coerce_float as coerce_float, @@ -23,7 +22,6 @@ coerce_boolean as coerce_boolean, coerce_integer as coerce_integer, file_from_path as 
file_from_path, - parse_datetime as parse_datetime, strip_not_given as strip_not_given, deepcopy_minimal as deepcopy_minimal, get_async_library as get_async_library, @@ -32,6 +30,13 @@ maybe_coerce_boolean as maybe_coerce_boolean, maybe_coerce_integer as maybe_coerce_integer, ) +from ._compat import ( + get_args as get_args, + is_union as is_union, + get_origin as get_origin, + is_typeddict as is_typeddict, + is_literal_type as is_literal_type, +) from ._typing import ( is_list_type as is_list_type, is_union_type as is_union_type, @@ -56,3 +61,4 @@ function_has_argument as function_has_argument, assert_signatures_in_sync as assert_signatures_in_sync, ) +from ._datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime diff --git a/src/gradient/_utils/_compat.py new file mode 100644 index 00000000..dd703233 --- /dev/null +++ b/src/gradient/_utils/_compat.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +import sys +import typing_extensions +from typing import Any, Type, Union, Literal, Optional +from datetime import date, datetime +from typing_extensions import get_args as _get_args, get_origin as _get_origin + +from .._types import StrBytesIntFloat +from ._datetime_parse import parse_date as _parse_date, parse_datetime as _parse_datetime + +_LITERAL_TYPES = {Literal, typing_extensions.Literal} + + +def get_args(tp: type[Any]) -> tuple[Any, ...]: + return _get_args(tp) + + +def get_origin(tp: type[Any]) -> type[Any] | None: + return _get_origin(tp) + + +def is_union(tp: Optional[Type[Any]]) -> bool: + if sys.version_info < (3, 10): + return tp is Union  # type: ignore[comparison-overlap] + else: + import types + + return tp is Union or tp is types.UnionType + + +def is_typeddict(tp: Type[Any]) -> bool: + return typing_extensions.is_typeddict(tp) + + +def is_literal_type(tp: Type[Any]) -> bool: + return get_origin(tp) in _LITERAL_TYPES + + +def parse_date(value: Union[date, StrBytesIntFloat]) -> date: + return _parse_date(value) + + +def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: + return _parse_datetime(value) diff --git a/src/gradient/_utils/_datetime_parse.py new file mode 100644 index 00000000..7cb9d9e6 --- /dev/null +++ b/src/gradient/_utils/_datetime_parse.py @@ -0,0 +1,136 @@ +""" +This file contains code from https://github.com/pydantic/pydantic/blob/main/pydantic/v1/datetime_parse.py +without the Pydantic v1 specific errors. +""" + +from __future__ import annotations + +import re +from typing import Dict, Union, Optional +from datetime import date, datetime, timezone, timedelta + +from .._types import StrBytesIntFloat + +date_expr = r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})" +time_expr = ( + r"(?P<hour>\d{1,2}):(?P<minute>\d{1,2})" + r"(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?" 
+ r"(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$" +) + +date_re = re.compile(f"{date_expr}$") +datetime_re = re.compile(f"{date_expr}[T ]{time_expr}") + + +EPOCH = datetime(1970, 1, 1) +# if greater than this, the number is in ms, if less than or equal it's in seconds +# (in seconds this is 11th October 2603, in ms it's 20th August 1970) +MS_WATERSHED = int(2e10) +# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9 +MAX_NUMBER = int(3e20) + + +def _get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]: + if isinstance(value, (int, float)): + return value + try: + return float(value) + except ValueError: + return None + except TypeError: + raise TypeError(f"invalid type; expected {native_expected_type}, string, bytes, int or float") from None + + +def _from_unix_seconds(seconds: Union[int, float]) -> datetime: + if seconds > MAX_NUMBER: + return datetime.max + elif seconds < -MAX_NUMBER: + return datetime.min + + while abs(seconds) > MS_WATERSHED: + seconds /= 1000 + dt = EPOCH + timedelta(seconds=seconds) + return dt.replace(tzinfo=timezone.utc) + + +def _parse_timezone(value: Optional[str]) -> Union[None, int, timezone]: + if value == "Z": + return timezone.utc + elif value is not None: + offset_mins = int(value[-2:]) if len(value) > 3 else 0 + offset = 60 * int(value[1:3]) + offset_mins + if value[0] == "-": + offset = -offset + return timezone(timedelta(minutes=offset)) + else: + return None + + +def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: + """ + Parse a datetime/int/float/string and return a datetime.datetime. + + This function supports time zone offsets. When the input contains one, + the output uses a timezone with a fixed offset from UTC. + + Raise ValueError if the input is well formatted but not a valid datetime. + Raise ValueError if the input isn't well formatted. + """ + if isinstance(value, datetime): + return value + + number = _get_numeric(value, "datetime") + if number is not None: + return _from_unix_seconds(number) + + if isinstance(value, bytes): + value = value.decode() + + assert not isinstance(value, (float, int)) + + match = datetime_re.match(value) + if match is None: + raise ValueError("invalid datetime format") + + kw = match.groupdict() + if kw["microsecond"]: + kw["microsecond"] = kw["microsecond"].ljust(6, "0") + + tzinfo = _parse_timezone(kw.pop("tzinfo")) + kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None} + kw_["tzinfo"] = tzinfo + + return datetime(**kw_)  # type: ignore + + +def parse_date(value: Union[date, StrBytesIntFloat]) -> date: + """ + Parse a date/int/float/string and return a datetime.date. + + Raise ValueError if the input is well formatted but not a valid date. + Raise ValueError if the input isn't well formatted. 
+ """ + if isinstance(value, date): + if isinstance(value, datetime): + return value.date() + else: + return value + + number = _get_numeric(value, "date") + if number is not None: + return _from_unix_seconds(number).date() + + if isinstance(value, bytes): + value = value.decode() + + assert not isinstance(value, (float, int)) + match = date_re.match(value) + if match is None: + raise ValueError("invalid date format") + + kw = {k: int(v) for k, v in match.groupdict().items()} + + try: + return date(**kw) + except ValueError: + raise ValueError("invalid date format") from None diff --git a/src/gradient/_utils/_transform.py b/src/gradient/_utils/_transform.py index f0bcefd4..c19124f0 100644 --- a/src/gradient/_utils/_transform.py +++ b/src/gradient/_utils/_transform.py @@ -19,6 +19,7 @@ is_sequence, ) from .._files import is_base64_file_input +from ._compat import get_origin, is_typeddict from ._typing import ( is_list_type, is_union_type, @@ -29,7 +30,6 @@ is_annotated_type, strip_annotated_type, ) -from .._compat import get_origin, model_dump, is_typeddict _T = TypeVar("_T") @@ -169,6 +169,8 @@ def _transform_recursive( Defaults to the same value as the `annotation` argument. """ + from .._compat import model_dump + if inner_type is None: inner_type = annotation @@ -333,6 +335,8 @@ async def _async_transform_recursive( Defaults to the same value as the `annotation` argument. """ + from .._compat import model_dump + if inner_type is None: inner_type = annotation diff --git a/src/gradient/_utils/_typing.py b/src/gradient/_utils/_typing.py index 845cd6b2..193109f3 100644 --- a/src/gradient/_utils/_typing.py +++ b/src/gradient/_utils/_typing.py @@ -15,7 +15,7 @@ from ._utils import lru_cache from .._types import InheritsGeneric -from .._compat import is_union as _is_union +from ._compat import is_union as _is_union def is_annotated_type(typ: type) -> bool: diff --git a/src/gradient/_utils/_utils.py b/src/gradient/_utils/_utils.py index ea3cf3f2..f0818595 100644 --- a/src/gradient/_utils/_utils.py +++ b/src/gradient/_utils/_utils.py @@ -22,7 +22,6 @@ import sniffio from .._types import NotGiven, FileTypes, NotGivenOr, HeadersLike -from .._compat import parse_date as parse_date, parse_datetime as parse_datetime _T = TypeVar("_T") _TupleT = TypeVar("_TupleT", bound=Tuple[object, ...]) diff --git a/src/gradient/types/__init__.py b/src/gradient/types/__init__.py index d5486cba..d3d7dab0 100644 --- a/src/gradient/types/__init__.py +++ b/src/gradient/types/__init__.py @@ -151,7 +151,31 @@ # This ensures that, when building the deferred (due to cyclical references) model schema, # Pydantic can resolve the necessary references. # See: https://github.com/pydantic/pydantic/issues/11250 for more context. 
-if _compat.PYDANTIC_V2: +if _compat.PYDANTIC_V1: + api_agent.APIAgent.update_forward_refs() # type: ignore + api_workspace.APIWorkspace.update_forward_refs() # type: ignore + agent_create_response.AgentCreateResponse.update_forward_refs() # type: ignore + agent_retrieve_response.AgentRetrieveResponse.update_forward_refs() # type: ignore + agent_update_response.AgentUpdateResponse.update_forward_refs() # type: ignore + agent_delete_response.AgentDeleteResponse.update_forward_refs() # type: ignore + agent_update_status_response.AgentUpdateStatusResponse.update_forward_refs() # type: ignore + agents.evaluation_metrics.workspace_create_response.WorkspaceCreateResponse.update_forward_refs() # type: ignore + agents.evaluation_metrics.workspace_retrieve_response.WorkspaceRetrieveResponse.update_forward_refs() # type: ignore + agents.evaluation_metrics.workspace_update_response.WorkspaceUpdateResponse.update_forward_refs() # type: ignore + agents.evaluation_metrics.workspace_list_response.WorkspaceListResponse.update_forward_refs() # type: ignore + agents.evaluation_metrics.workspaces.agent_list_response.AgentListResponse.update_forward_refs() # type: ignore + agents.evaluation_metrics.workspaces.agent_move_response.AgentMoveResponse.update_forward_refs() # type: ignore + agents.evaluation_metrics.anthropic.key_list_agents_response.KeyListAgentsResponse.update_forward_refs() # type: ignore + agents.evaluation_metrics.openai.key_list_agents_response.KeyListAgentsResponse.update_forward_refs() # type: ignore + agents.function_create_response.FunctionCreateResponse.update_forward_refs() # type: ignore + agents.function_update_response.FunctionUpdateResponse.update_forward_refs() # type: ignore + agents.function_delete_response.FunctionDeleteResponse.update_forward_refs() # type: ignore + agents.api_link_knowledge_base_output.APILinkKnowledgeBaseOutput.update_forward_refs() # type: ignore + agents.knowledge_base_detach_response.KnowledgeBaseDetachResponse.update_forward_refs() # type: ignore + agents.route_view_response.RouteViewResponse.update_forward_refs() # type: ignore + models.providers.anthropic_list_agents_response.AnthropicListAgentsResponse.update_forward_refs() # type: ignore + models.providers.openai_retrieve_agents_response.OpenAIRetrieveAgentsResponse.update_forward_refs() # type: ignore +else: api_agent.APIAgent.model_rebuild(_parent_namespace_depth=0) api_workspace.APIWorkspace.model_rebuild(_parent_namespace_depth=0) agent_create_response.AgentCreateResponse.model_rebuild(_parent_namespace_depth=0) @@ -183,27 +207,3 @@ models.providers.openai_retrieve_agents_response.OpenAIRetrieveAgentsResponse.model_rebuild( _parent_namespace_depth=0 ) -else: - api_agent.APIAgent.update_forward_refs() # type: ignore - api_workspace.APIWorkspace.update_forward_refs() # type: ignore - agent_create_response.AgentCreateResponse.update_forward_refs() # type: ignore - agent_retrieve_response.AgentRetrieveResponse.update_forward_refs() # type: ignore - agent_update_response.AgentUpdateResponse.update_forward_refs() # type: ignore - agent_delete_response.AgentDeleteResponse.update_forward_refs() # type: ignore - agent_update_status_response.AgentUpdateStatusResponse.update_forward_refs() # type: ignore - agents.evaluation_metrics.workspace_create_response.WorkspaceCreateResponse.update_forward_refs() # type: ignore - agents.evaluation_metrics.workspace_retrieve_response.WorkspaceRetrieveResponse.update_forward_refs() # type: ignore - 
agents.evaluation_metrics.workspace_update_response.WorkspaceUpdateResponse.update_forward_refs() # type: ignore - agents.evaluation_metrics.workspace_list_response.WorkspaceListResponse.update_forward_refs() # type: ignore - agents.evaluation_metrics.workspaces.agent_list_response.AgentListResponse.update_forward_refs() # type: ignore - agents.evaluation_metrics.workspaces.agent_move_response.AgentMoveResponse.update_forward_refs() # type: ignore - agents.evaluation_metrics.anthropic.key_list_agents_response.KeyListAgentsResponse.update_forward_refs() # type: ignore - agents.evaluation_metrics.openai.key_list_agents_response.KeyListAgentsResponse.update_forward_refs() # type: ignore - agents.function_create_response.FunctionCreateResponse.update_forward_refs() # type: ignore - agents.function_update_response.FunctionUpdateResponse.update_forward_refs() # type: ignore - agents.function_delete_response.FunctionDeleteResponse.update_forward_refs() # type: ignore - agents.api_link_knowledge_base_output.APILinkKnowledgeBaseOutput.update_forward_refs() # type: ignore - agents.knowledge_base_detach_response.KnowledgeBaseDetachResponse.update_forward_refs() # type: ignore - agents.route_view_response.RouteViewResponse.update_forward_refs() # type: ignore - models.providers.anthropic_list_agents_response.AnthropicListAgentsResponse.update_forward_refs() # type: ignore - models.providers.openai_retrieve_agents_response.OpenAIRetrieveAgentsResponse.update_forward_refs() # type: ignore diff --git a/tests/test_models.py b/tests/test_models.py index 9a2ee908..de5ef465 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -8,7 +8,7 @@ from pydantic import Field from gradient._utils import PropertyInfo -from gradient._compat import PYDANTIC_V2, parse_obj, model_dump, model_json +from gradient._compat import PYDANTIC_V1, parse_obj, model_dump, model_json from gradient._models import BaseModel, construct_type @@ -294,12 +294,12 @@ class Model(BaseModel): assert cast(bool, m.foo) is True m = Model.construct(foo={"name": 3}) - if PYDANTIC_V2: - assert isinstance(m.foo, Submodel1) - assert m.foo.name == 3 # type: ignore - else: + if PYDANTIC_V1: assert isinstance(m.foo, Submodel2) assert m.foo.name == "3" + else: + assert isinstance(m.foo, Submodel1) + assert m.foo.name == 3 # type: ignore def test_list_of_unions() -> None: @@ -426,10 +426,10 @@ class Model(BaseModel): expected = datetime(2019, 12, 27, 18, 11, 19, 117000, tzinfo=timezone.utc) - if PYDANTIC_V2: - expected_json = '{"created_at":"2019-12-27T18:11:19.117000Z"}' - else: + if PYDANTIC_V1: expected_json = '{"created_at": "2019-12-27T18:11:19.117000+00:00"}' + else: + expected_json = '{"created_at":"2019-12-27T18:11:19.117000Z"}' model = Model.construct(created_at="2019-12-27T18:11:19.117Z") assert model.created_at == expected @@ -531,7 +531,7 @@ class Model2(BaseModel): assert m4.to_dict(mode="python") == {"created_at": datetime.fromisoformat(time_str)} assert m4.to_dict(mode="json") == {"created_at": time_str} - if not PYDANTIC_V2: + if PYDANTIC_V1: with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"): m.to_dict(warnings=False) @@ -556,7 +556,7 @@ class Model(BaseModel): assert m3.model_dump() == {"foo": None} assert m3.model_dump(exclude_none=True) == {} - if not PYDANTIC_V2: + if PYDANTIC_V1: with pytest.raises(ValueError, match="round_trip is only supported in Pydantic v2"): m.model_dump(round_trip=True) @@ -580,10 +580,10 @@ class Model(BaseModel): assert json.loads(m.to_json()) == {"FOO": "hello"} assert 
json.loads(m.to_json(use_api_names=False)) == {"foo": "hello"} - if PYDANTIC_V2: - assert m.to_json(indent=None) == '{"FOO":"hello"}' - else: + if PYDANTIC_V1: assert m.to_json(indent=None) == '{"FOO": "hello"}' + else: + assert m.to_json(indent=None) == '{"FOO":"hello"}' m2 = Model() assert json.loads(m2.to_json()) == {} @@ -595,7 +595,7 @@ class Model(BaseModel): assert json.loads(m3.to_json()) == {"FOO": None} assert json.loads(m3.to_json(exclude_none=True)) == {} - if not PYDANTIC_V2: + if PYDANTIC_V1: with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"): m.to_json(warnings=False) @@ -622,7 +622,7 @@ class Model(BaseModel): assert json.loads(m3.model_dump_json()) == {"foo": None} assert json.loads(m3.model_dump_json(exclude_none=True)) == {} - if not PYDANTIC_V2: + if PYDANTIC_V1: with pytest.raises(ValueError, match="round_trip is only supported in Pydantic v2"): m.model_dump_json(round_trip=True) @@ -679,12 +679,12 @@ class B(BaseModel): ) assert isinstance(m, A) assert m.type == "a" - if PYDANTIC_V2: - assert m.data == 100 # type: ignore[comparison-overlap] - else: + if PYDANTIC_V1: # pydantic v1 automatically converts inputs to strings # if the expected type is a str assert m.data == "100" + else: + assert m.data == 100 # type: ignore[comparison-overlap] def test_discriminated_unions_unknown_variant() -> None: @@ -768,12 +768,12 @@ class B(BaseModel): ) assert isinstance(m, A) assert m.foo_type == "a" - if PYDANTIC_V2: - assert m.data == 100 # type: ignore[comparison-overlap] - else: + if PYDANTIC_V1: # pydantic v1 automatically converts inputs to strings # if the expected type is a str assert m.data == "100" + else: + assert m.data == 100 # type: ignore[comparison-overlap] def test_discriminated_unions_overlapping_discriminators_invalid_data() -> None: @@ -833,7 +833,7 @@ class B(BaseModel): assert UnionType.__discriminator__ is discriminator -@pytest.mark.skipif(not PYDANTIC_V2, reason="TypeAliasType is not supported in Pydantic v1") +@pytest.mark.skipif(PYDANTIC_V1, reason="TypeAliasType is not supported in Pydantic v1") def test_type_alias_type() -> None: Alias = TypeAliasType("Alias", str) # pyright: ignore @@ -849,7 +849,7 @@ class Model(BaseModel): assert m.union == "bar" -@pytest.mark.skipif(not PYDANTIC_V2, reason="TypeAliasType is not supported in Pydantic v1") +@pytest.mark.skipif(PYDANTIC_V1, reason="TypeAliasType is not supported in Pydantic v1") def test_field_named_cls() -> None: class Model(BaseModel): cls: str @@ -936,7 +936,7 @@ class Type2(BaseModel): assert isinstance(model.value, InnerType2) -@pytest.mark.skipif(not PYDANTIC_V2, reason="this is only supported in pydantic v2 for now") +@pytest.mark.skipif(PYDANTIC_V1, reason="this is only supported in pydantic v2 for now") def test_extra_properties() -> None: class Item(BaseModel): prop: int diff --git a/tests/test_transform.py b/tests/test_transform.py index 552462fa..db909f25 100644 --- a/tests/test_transform.py +++ b/tests/test_transform.py @@ -15,7 +15,7 @@ parse_datetime, async_transform as _async_transform, ) -from gradient._compat import PYDANTIC_V2 +from gradient._compat import PYDANTIC_V1 from gradient._models import BaseModel _T = TypeVar("_T") @@ -189,7 +189,7 @@ class DateModel(BaseModel): @pytest.mark.asyncio async def test_iso8601_format(use_async: bool) -> None: dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00") - tz = "Z" if PYDANTIC_V2 else "+00:00" + tz = "+00:00" if PYDANTIC_V1 else "Z" assert await transform({"foo": dt}, DatetimeDict, use_async) == 
{"foo": "2023-02-23T14:16:36.337692+00:00"} # type: ignore[comparison-overlap] assert await transform(DatetimeModel(foo=dt), Any, use_async) == {"foo": "2023-02-23T14:16:36.337692" + tz} # type: ignore[comparison-overlap] @@ -297,11 +297,11 @@ async def test_pydantic_unknown_field(use_async: bool) -> None: @pytest.mark.asyncio async def test_pydantic_mismatched_types(use_async: bool) -> None: model = MyModel.construct(foo=True) - if PYDANTIC_V2: + if PYDANTIC_V1: + params = await transform(model, Any, use_async) + else: with pytest.warns(UserWarning): params = await transform(model, Any, use_async) - else: - params = await transform(model, Any, use_async) assert cast(Any, params) == {"foo": True} @@ -309,11 +309,11 @@ async def test_pydantic_mismatched_types(use_async: bool) -> None: @pytest.mark.asyncio async def test_pydantic_mismatched_object_type(use_async: bool) -> None: model = MyModel.construct(foo=MyModel.construct(hello="world")) - if PYDANTIC_V2: + if PYDANTIC_V1: + params = await transform(model, Any, use_async) + else: with pytest.warns(UserWarning): params = await transform(model, Any, use_async) - else: - params = await transform(model, Any, use_async) assert cast(Any, params) == {"foo": {"hello": "world"}} diff --git a/tests/test_utils/test_datetime_parse.py b/tests/test_utils/test_datetime_parse.py new file mode 100644 index 00000000..6cbb1b6f --- /dev/null +++ b/tests/test_utils/test_datetime_parse.py @@ -0,0 +1,110 @@ +""" +Copied from https://github.com/pydantic/pydantic/blob/v1.10.22/tests/test_datetime_parse.py +with modifications so it works without pydantic v1 imports. +""" + +from typing import Type, Union +from datetime import date, datetime, timezone, timedelta + +import pytest + +from gradient._utils import parse_date, parse_datetime + + +def create_tz(minutes: int) -> timezone: + return timezone(timedelta(minutes=minutes)) + + +@pytest.mark.parametrize( + "value,result", + [ + # Valid inputs + ("1494012444.883309", date(2017, 5, 5)), + (b"1494012444.883309", date(2017, 5, 5)), + (1_494_012_444.883_309, date(2017, 5, 5)), + ("1494012444", date(2017, 5, 5)), + (1_494_012_444, date(2017, 5, 5)), + (0, date(1970, 1, 1)), + ("2012-04-23", date(2012, 4, 23)), + (b"2012-04-23", date(2012, 4, 23)), + ("2012-4-9", date(2012, 4, 9)), + (date(2012, 4, 9), date(2012, 4, 9)), + (datetime(2012, 4, 9, 12, 15), date(2012, 4, 9)), + # Invalid inputs + ("x20120423", ValueError), + ("2012-04-56", ValueError), + (19_999_999_999, date(2603, 10, 11)), # just before watershed + (20_000_000_001, date(1970, 8, 20)), # just after watershed + (1_549_316_052, date(2019, 2, 4)), # nowish in s + (1_549_316_052_104, date(2019, 2, 4)), # nowish in ms + (1_549_316_052_104_324, date(2019, 2, 4)), # nowish in μs + (1_549_316_052_104_324_096, date(2019, 2, 4)), # nowish in ns + ("infinity", date(9999, 12, 31)), + ("inf", date(9999, 12, 31)), + (float("inf"), date(9999, 12, 31)), + ("infinity ", date(9999, 12, 31)), + (int("1" + "0" * 100), date(9999, 12, 31)), + (1e1000, date(9999, 12, 31)), + ("-infinity", date(1, 1, 1)), + ("-inf", date(1, 1, 1)), + ("nan", ValueError), + ], +) +def test_date_parsing(value: Union[str, bytes, int, float], result: Union[date, Type[Exception]]) -> None: + if type(result) == type and issubclass(result, Exception): # pyright: ignore[reportUnnecessaryIsInstance] + with pytest.raises(result): + parse_date(value) + else: + assert parse_date(value) == result + + +@pytest.mark.parametrize( + "value,result", + [ + # Valid inputs + # values in seconds + 
("1494012444.883309", datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)), + (1_494_012_444.883_309, datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)), + ("1494012444", datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + (b"1494012444", datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + (1_494_012_444, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + # values in ms + ("1494012444000.883309", datetime(2017, 5, 5, 19, 27, 24, 883, tzinfo=timezone.utc)), + ("-1494012444000.883309", datetime(1922, 8, 29, 4, 32, 35, 999117, tzinfo=timezone.utc)), + (1_494_012_444_000, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + ("2012-04-23T09:15:00", datetime(2012, 4, 23, 9, 15)), + ("2012-4-9 4:8:16", datetime(2012, 4, 9, 4, 8, 16)), + ("2012-04-23T09:15:00Z", datetime(2012, 4, 23, 9, 15, 0, 0, timezone.utc)), + ("2012-4-9 4:8:16-0320", datetime(2012, 4, 9, 4, 8, 16, 0, create_tz(-200))), + ("2012-04-23T10:20:30.400+02:30", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(150))), + ("2012-04-23T10:20:30.400+02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(120))), + ("2012-04-23T10:20:30.400-02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))), + (b"2012-04-23T10:20:30.400-02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))), + (datetime(2017, 5, 5), datetime(2017, 5, 5)), + (0, datetime(1970, 1, 1, 0, 0, 0, tzinfo=timezone.utc)), + # Invalid inputs + ("x20120423091500", ValueError), + ("2012-04-56T09:15:90", ValueError), + ("2012-04-23T11:05:00-25:00", ValueError), + (19_999_999_999, datetime(2603, 10, 11, 11, 33, 19, tzinfo=timezone.utc)), # just before watershed + (20_000_000_001, datetime(1970, 8, 20, 11, 33, 20, 1000, tzinfo=timezone.utc)), # just after watershed + (1_549_316_052, datetime(2019, 2, 4, 21, 34, 12, 0, tzinfo=timezone.utc)), # nowish in s + (1_549_316_052_104, datetime(2019, 2, 4, 21, 34, 12, 104_000, tzinfo=timezone.utc)), # nowish in ms + (1_549_316_052_104_324, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)), # nowish in μs + (1_549_316_052_104_324_096, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)), # nowish in ns + ("infinity", datetime(9999, 12, 31, 23, 59, 59, 999999)), + ("inf", datetime(9999, 12, 31, 23, 59, 59, 999999)), + ("inf ", datetime(9999, 12, 31, 23, 59, 59, 999999)), + (1e50, datetime(9999, 12, 31, 23, 59, 59, 999999)), + (float("inf"), datetime(9999, 12, 31, 23, 59, 59, 999999)), + ("-infinity", datetime(1, 1, 1, 0, 0)), + ("-inf", datetime(1, 1, 1, 0, 0)), + ("nan", ValueError), + ], +) +def test_datetime_parsing(value: Union[str, bytes, int, float], result: Union[datetime, Type[Exception]]) -> None: + if type(result) == type and issubclass(result, Exception): # pyright: ignore[reportUnnecessaryIsInstance] + with pytest.raises(result): + parse_datetime(value) + else: + assert parse_datetime(value) == result diff --git a/tests/utils.py b/tests/utils.py index ac014538..8d9112d6 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -19,7 +19,7 @@ is_annotated_type, is_type_alias_type, ) -from gradient._compat import PYDANTIC_V2, field_outer_type, get_model_fields +from gradient._compat import PYDANTIC_V1, field_outer_type, get_model_fields from gradient._models import BaseModel BaseModelT = TypeVar("BaseModelT", bound=BaseModel) @@ -28,12 +28,12 @@ def assert_matches_model(model: type[BaseModelT], value: BaseModelT, *, path: list[str]) -> bool: for name, field in get_model_fields(model).items(): field_value = getattr(value, name) - if 
PYDANTIC_V2: - allow_none = False - else: + if PYDANTIC_V1: # in v1 nullability was structured differently # https://docs.pydantic.dev/2.0/migration/#required-optional-and-nullable-fields allow_none = getattr(field, "allow_none", False) + else: + allow_none = False assert_matches_type( field_outer_type(field), From 25c044818b636e3307af2fefd2add15a6e650e8d Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 5 Sep 2025 04:30:15 +0000 Subject: [PATCH 04/11] chore(internal): move mypy configurations to `pyproject.toml` file --- mypy.ini | 50 ------------------------------------------------ pyproject.toml | 52 ++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 52 insertions(+), 50 deletions(-) delete mode 100644 mypy.ini diff --git a/mypy.ini b/mypy.ini deleted file mode 100644 index 9a8e555e..00000000 --- a/mypy.ini +++ /dev/null @@ -1,50 +0,0 @@ -[mypy] -pretty = True -show_error_codes = True - -# Exclude _files.py because mypy isn't smart enough to apply -# the correct type narrowing and as this is an internal module -# it's fine to just use Pyright. -# -# We also exclude our `tests` as mypy doesn't always infer -# types correctly and Pyright will still catch any type errors. -exclude = ^(src/gradient/_files\.py|_dev/.*\.py|tests/.*)$ - -strict_equality = True -implicit_reexport = True -check_untyped_defs = True -no_implicit_optional = True - -warn_return_any = True -warn_unreachable = True -warn_unused_configs = True - -# Turn these options off as it could cause conflicts -# with the Pyright options. -warn_unused_ignores = False -warn_redundant_casts = False - -disallow_any_generics = True -disallow_untyped_defs = True -disallow_untyped_calls = True -disallow_subclassing_any = True -disallow_incomplete_defs = True -disallow_untyped_decorators = True -cache_fine_grained = True - -# By default, mypy reports an error if you assign a value to the result -# of a function call that doesn't return anything. We do this in our test -# cases: -# ``` -# result = ... -# assert result is None -# ``` -# Changing this codegen to make mypy happy would increase complexity -# and would not be worth it. -disable_error_code = func-returns-value,overload-cannot-match - -# https://github.com/python/mypy/issues/12162 -[mypy.overrides] -module = "black.files.*" -ignore_errors = true -ignore_missing_imports = true diff --git a/pyproject.toml b/pyproject.toml index bde954ca..8dcc8f0c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -144,6 +144,58 @@ reportOverlappingOverload = false reportImportCycles = false reportPrivateUsage = false +[tool.mypy] +pretty = true +show_error_codes = true + +# Exclude _files.py because mypy isn't smart enough to apply +# the correct type narrowing and as this is an internal module +# it's fine to just use Pyright. +# +# We also exclude our `tests` as mypy doesn't always infer +# types correctly and Pyright will still catch any type errors. +exclude = ['src/gradient/_files.py', '_dev/.*.py', 'tests/.*'] + +strict_equality = true +implicit_reexport = true +check_untyped_defs = true +no_implicit_optional = true + +warn_return_any = true +warn_unreachable = true +warn_unused_configs = true + +# Turn these options off as it could cause conflicts +# with the Pyright options. 
+warn_unused_ignores = false +warn_redundant_casts = false + +disallow_any_generics = true +disallow_untyped_defs = true +disallow_untyped_calls = true +disallow_subclassing_any = true +disallow_incomplete_defs = true +disallow_untyped_decorators = true +cache_fine_grained = true + +# By default, mypy reports an error if you assign a value to the result +# of a function call that doesn't return anything. We do this in our test +# cases: +# ``` +# result = ... +# assert result is None +# ``` +# Changing this codegen to make mypy happy would increase complexity +# and would not be worth it. +disable_error_code = "func-returns-value,overload-cannot-match" + +# https://github.com/python/mypy/issues/12162 +[[tool.mypy.overrides]] +module = "black.files.*" +ignore_errors = true +ignore_missing_imports = true + + [tool.ruff] line-length = 120 output-format = "grouped" From b839e4b31c1262157544bd69536051a10d6b098d Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 6 Sep 2025 05:16:48 +0000 Subject: [PATCH 05/11] chore(tests): simplify `get_platform` test `nest_asyncio` is archived and broken on some platforms so it's not worth keeping in our test suite. --- pyproject.toml | 1 - requirements-dev.lock | 1 - tests/test_client.py | 55 +++++-------------------------------------- 3 files changed, 6 insertions(+), 51 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 8dcc8f0c..5fc8df7f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,7 +54,6 @@ dev-dependencies = [ "dirty-equals>=0.6.0", "importlib-metadata>=6.7.0", "rich>=13.7.1", - "nest_asyncio==1.6.0", "pytest-xdist>=3.6.1", ] diff --git a/requirements-dev.lock b/requirements-dev.lock index 7a0f60ab..af44e06b 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -75,7 +75,6 @@ multidict==6.4.4 mypy==1.14.1 mypy-extensions==1.0.0 # via mypy -nest-asyncio==1.6.0 nodeenv==1.8.0 # via pyright nox==2023.4.22 diff --git a/tests/test_client.py b/tests/test_client.py index 9422604d..98833ff2 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -6,13 +6,10 @@ import os import sys import json -import time import asyncio import inspect -import subprocess import tracemalloc from typing import Any, Union, cast -from textwrap import dedent from unittest import mock from typing_extensions import Literal @@ -23,6 +20,7 @@ from gradient import Gradient, AsyncGradient, APIResponseValidationError from gradient._types import Omit +from gradient._utils import asyncify from gradient._models import BaseModel, FinalRequestOptions from gradient._streaming import Stream, AsyncStream from gradient._exceptions import ( @@ -34,8 +32,10 @@ DEFAULT_TIMEOUT, HTTPX_DEFAULT_TIMEOUT, BaseClient, + OtherPlatform, DefaultHttpxClient, DefaultAsyncHttpxClient, + get_platform, make_request_options, ) @@ -2058,52 +2058,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: assert response.http_request.headers.get("x-stainless-retry-count") == "42" - def test_get_platform(self) -> None: - # A previous implementation of asyncify could leave threads unterminated when - # used with nest_asyncio. - # - # Since nest_asyncio.apply() is global and cannot be un-applied, this - # test is run in a separate process to avoid affecting other tests. 
- test_code = dedent( - """ - import asyncio - import nest_asyncio - import threading - - from gradient._utils import asyncify - from gradient._base_client import get_platform - - async def test_main() -> None: - result = await asyncify(get_platform)() - print(result) - for thread in threading.enumerate(): - print(thread.name) - - nest_asyncio.apply() - asyncio.run(test_main()) - """ - ) - with subprocess.Popen( - [sys.executable, "-c", test_code], - text=True, - ) as process: - timeout = 10 # seconds - - start_time = time.monotonic() - while True: - return_code = process.poll() - if return_code is not None: - if return_code != 0: - raise AssertionError("calling get_platform using asyncify resulted in a non-zero exit code") - - # success - break - - if time.monotonic() - start_time > timeout: - process.kill() - raise AssertionError("calling get_platform using asyncify resulted in a hung process") - - time.sleep(0.1) + async def test_get_platform(self) -> None: + platform = await asyncify(get_platform)() + assert isinstance(platform, (str, OtherPlatform)) async def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None: # Test that the proxy environment variables are set correctly From 0ad7f2f9545d1d8f7199744de9469b3507f93b7e Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 12 Sep 2025 15:29:45 +0000 Subject: [PATCH 06/11] codegen metadata --- .stats.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.stats.yml b/.stats.yml index 4a621094..512aabaf 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 170 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/digitalocean%2Fgradient-621c3ebf5011c5ca508f78fccbea17de4ca6b35bfe99578c1ae2265021578d6f.yml openapi_spec_hash: e29d14e3e4679fcf22b3e760e49931b1 -config_hash: 6c8d569b60ae6536708a165b72ff838f +config_hash: 08281b73cbc4aa830d1fa79914dc79fe From a3a3e302f31d92baa9b055c9c8152abd5293a6b4 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 12 Sep 2025 15:30:12 +0000 Subject: [PATCH 07/11] codegen metadata --- .stats.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.stats.yml b/.stats.yml index 512aabaf..df2d2abe 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 170 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/digitalocean%2Fgradient-621c3ebf5011c5ca508f78fccbea17de4ca6b35bfe99578c1ae2265021578d6f.yml openapi_spec_hash: e29d14e3e4679fcf22b3e760e49931b1 -config_hash: 08281b73cbc4aa830d1fa79914dc79fe +config_hash: 6e3aacb34562b5a1ef74bb279f55cbec From c17086aaed18fbb8ba85f050556a193cdc4a233f Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 12 Sep 2025 15:53:44 +0000 Subject: [PATCH 08/11] feat(api): enable typescript --- .stats.yml | 4 +- api.md | 26 ++ src/gradient/resources/agents/agents.py | 120 +++++++++ .../agents/evaluation_metrics/__init__.py | 14 ++ .../evaluation_metrics/evaluation_metrics.py | 32 +++ .../evaluation_metrics/oauth2/__init__.py | 33 +++ .../evaluation_metrics/oauth2/dropbox.py | 193 +++++++++++++++ .../evaluation_metrics/oauth2/oauth2.py | 229 ++++++++++++++++++ src/gradient/types/__init__.py | 2 + .../types/agent_retrieve_usage_params.py | 18 ++ .../types/agent_retrieve_usage_response.py | 48 ++++ .../agents/evaluation_metrics/__init__.py | 2 + 
.../evaluation_metrics/oauth2/__init__.py | 6 + .../oauth2/dropbox_create_tokens_params.py | 15 ++ .../oauth2/dropbox_create_tokens_response.py | 15 ++ .../oauth2_generate_url_params.py | 15 ++ .../oauth2_generate_url_response.py | 12 + .../evaluation_metrics/oauth2/__init__.py | 1 + .../evaluation_metrics/oauth2/test_dropbox.py | 100 ++++++++ .../agents/evaluation_metrics/test_oauth2.py | 98 ++++++++ tests/api_resources/test_agents.py | 105 ++++++++ 21 files changed, 1086 insertions(+), 2 deletions(-) create mode 100644 src/gradient/resources/agents/evaluation_metrics/oauth2/__init__.py create mode 100644 src/gradient/resources/agents/evaluation_metrics/oauth2/dropbox.py create mode 100644 src/gradient/resources/agents/evaluation_metrics/oauth2/oauth2.py create mode 100644 src/gradient/types/agent_retrieve_usage_params.py create mode 100644 src/gradient/types/agent_retrieve_usage_response.py create mode 100644 src/gradient/types/agents/evaluation_metrics/oauth2/__init__.py create mode 100644 src/gradient/types/agents/evaluation_metrics/oauth2/dropbox_create_tokens_params.py create mode 100644 src/gradient/types/agents/evaluation_metrics/oauth2/dropbox_create_tokens_response.py create mode 100644 src/gradient/types/agents/evaluation_metrics/oauth2_generate_url_params.py create mode 100644 src/gradient/types/agents/evaluation_metrics/oauth2_generate_url_response.py create mode 100644 tests/api_resources/agents/evaluation_metrics/oauth2/__init__.py create mode 100644 tests/api_resources/agents/evaluation_metrics/oauth2/test_dropbox.py create mode 100644 tests/api_resources/agents/evaluation_metrics/test_oauth2.py diff --git a/.stats.yml b/.stats.yml index df2d2abe..e30c19b7 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ -configured_endpoints: 170 +configured_endpoints: 173 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/digitalocean%2Fgradient-621c3ebf5011c5ca508f78fccbea17de4ca6b35bfe99578c1ae2265021578d6f.yml openapi_spec_hash: e29d14e3e4679fcf22b3e760e49931b1 -config_hash: 6e3aacb34562b5a1ef74bb279f55cbec +config_hash: 3d425c415b7f7ab581418b43eb521cb3 diff --git a/api.md b/api.md index 1091e4dc..7299b3c6 100644 --- a/api.md +++ b/api.md @@ -51,6 +51,7 @@ from gradient.types import ( AgentUpdateResponse, AgentListResponse, AgentDeleteResponse, + AgentRetrieveUsageResponse, AgentUpdateStatusResponse, ) ``` @@ -62,6 +63,7 @@ Methods: - client.agents.update(path_uuid, \*\*params) -> AgentUpdateResponse - client.agents.list(\*\*params) -> AgentListResponse - client.agents.delete(uuid) -> AgentDeleteResponse +- client.agents.retrieve_usage(uuid, \*\*params) -> AgentRetrieveUsageResponse - client.agents.update_status(path_uuid, \*\*params) -> AgentUpdateStatusResponse ## APIKeys @@ -214,6 +216,30 @@ Methods: - client.agents.evaluation_metrics.openai.keys.delete(api_key_uuid) -> KeyDeleteResponse - client.agents.evaluation_metrics.openai.keys.list_agents(uuid, \*\*params) -> KeyListAgentsResponse +### Oauth2 + +Types: + +```python +from gradient.types.agents.evaluation_metrics import Oauth2GenerateURLResponse +``` + +Methods: + +- client.agents.evaluation_metrics.oauth2.generate_url(\*\*params) -> Oauth2GenerateURLResponse + +#### Dropbox + +Types: + +```python +from gradient.types.agents.evaluation_metrics.oauth2 import DropboxCreateTokensResponse +``` + +Methods: + +- client.agents.evaluation_metrics.oauth2.dropbox.create_tokens(\*\*params) -> DropboxCreateTokensResponse + ## EvaluationRuns Types: diff --git a/src/gradient/resources/agents/agents.py 
b/src/gradient/resources/agents/agents.py index 8d06584c..590b9a76 100644 --- a/src/gradient/resources/agents/agents.py +++ b/src/gradient/resources/agents/agents.py @@ -19,6 +19,7 @@ agent_create_params, agent_update_params, agent_update_status_params, + agent_retrieve_usage_params, ) from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr from ..._utils import maybe_transform, async_maybe_transform @@ -103,6 +104,7 @@ from ...types.agent_retrieve_response import AgentRetrieveResponse from ...types.api_deployment_visibility import APIDeploymentVisibility from ...types.agent_update_status_response import AgentUpdateStatusResponse +from ...types.agent_retrieve_usage_response import AgentRetrieveUsageResponse from .evaluation_metrics.evaluation_metrics import ( EvaluationMetricsResource, AsyncEvaluationMetricsResource, @@ -500,6 +502,59 @@ def delete( cast_to=AgentDeleteResponse, ) + def retrieve_usage( + self, + uuid: str, + *, + start: str | NotGiven = NOT_GIVEN, + stop: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AgentRetrieveUsageResponse: + """ + To get agent usage, send a GET request to `/v2/gen-ai/agents/{uuid}/usage`. + Returns usage metrics for the specified agent within the provided time range. + + Args: + start: Return all usage data from this date. + + stop: Return all usage data up to this date, if omitted, will return up to the current + date. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not uuid: + raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") + return self._get( + f"/v2/gen-ai/agents/{uuid}/usage" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{uuid}/usage", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "start": start, + "stop": stop, + }, + agent_retrieve_usage_params.AgentRetrieveUsageParams, + ), + ), + cast_to=AgentRetrieveUsageResponse, + ) + def update_status( self, path_uuid: str, @@ -943,6 +998,59 @@ async def delete( cast_to=AgentDeleteResponse, ) + async def retrieve_usage( + self, + uuid: str, + *, + start: str | NotGiven = NOT_GIVEN, + stop: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AgentRetrieveUsageResponse: + """ + To get agent usage, send a GET request to `/v2/gen-ai/agents/{uuid}/usage`. + Returns usage metrics for the specified agent within the provided time range. + + Args: + start: Return all usage data from this date. 
+ + stop: Return all usage data up to this date, if omitted, will return up to the current + date. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not uuid: + raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") + return await self._get( + f"/v2/gen-ai/agents/{uuid}/usage" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{uuid}/usage", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "start": start, + "stop": stop, + }, + agent_retrieve_usage_params.AgentRetrieveUsageParams, + ), + ), + cast_to=AgentRetrieveUsageResponse, + ) + async def update_status( self, path_uuid: str, @@ -1020,6 +1128,9 @@ def __init__(self, agents: AgentsResource) -> None: self.delete = to_raw_response_wrapper( agents.delete, ) + self.retrieve_usage = to_raw_response_wrapper( + agents.retrieve_usage, + ) self.update_status = to_raw_response_wrapper( agents.update_status, ) @@ -1084,6 +1195,9 @@ def __init__(self, agents: AsyncAgentsResource) -> None: self.delete = async_to_raw_response_wrapper( agents.delete, ) + self.retrieve_usage = async_to_raw_response_wrapper( + agents.retrieve_usage, + ) self.update_status = async_to_raw_response_wrapper( agents.update_status, ) @@ -1148,6 +1262,9 @@ def __init__(self, agents: AgentsResource) -> None: self.delete = to_streamed_response_wrapper( agents.delete, ) + self.retrieve_usage = to_streamed_response_wrapper( + agents.retrieve_usage, + ) self.update_status = to_streamed_response_wrapper( agents.update_status, ) @@ -1212,6 +1329,9 @@ def __init__(self, agents: AsyncAgentsResource) -> None: self.delete = async_to_streamed_response_wrapper( agents.delete, ) + self.retrieve_usage = async_to_streamed_response_wrapper( + agents.retrieve_usage, + ) self.update_status = async_to_streamed_response_wrapper( agents.update_status, ) diff --git a/src/gradient/resources/agents/evaluation_metrics/__init__.py b/src/gradient/resources/agents/evaluation_metrics/__init__.py index 92449820..31e2f93b 100644 --- a/src/gradient/resources/agents/evaluation_metrics/__init__.py +++ b/src/gradient/resources/agents/evaluation_metrics/__init__.py @@ -8,6 +8,14 @@ ModelsResourceWithStreamingResponse, AsyncModelsResourceWithStreamingResponse, ) +from .oauth2 import ( + Oauth2Resource, + AsyncOauth2Resource, + Oauth2ResourceWithRawResponse, + AsyncOauth2ResourceWithRawResponse, + Oauth2ResourceWithStreamingResponse, + AsyncOauth2ResourceWithStreamingResponse, +) from .openai import ( OpenAIResource, AsyncOpenAIResource, @@ -66,6 +74,12 @@ "AsyncOpenAIResourceWithRawResponse", "OpenAIResourceWithStreamingResponse", "AsyncOpenAIResourceWithStreamingResponse", + "Oauth2Resource", + "AsyncOauth2Resource", + "Oauth2ResourceWithRawResponse", + "AsyncOauth2ResourceWithRawResponse", + "Oauth2ResourceWithStreamingResponse", + "AsyncOauth2ResourceWithStreamingResponse", "EvaluationMetricsResource", "AsyncEvaluationMetricsResource", "EvaluationMetricsResourceWithRawResponse", diff --git a/src/gradient/resources/agents/evaluation_metrics/evaluation_metrics.py b/src/gradient/resources/agents/evaluation_metrics/evaluation_metrics.py index f6453d4d..b9e1386b 100644 --- 
a/src/gradient/resources/agents/evaluation_metrics/evaluation_metrics.py +++ b/src/gradient/resources/agents/evaluation_metrics/evaluation_metrics.py @@ -22,6 +22,14 @@ async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) +from .oauth2.oauth2 import ( + Oauth2Resource, + AsyncOauth2Resource, + Oauth2ResourceWithRawResponse, + AsyncOauth2ResourceWithRawResponse, + Oauth2ResourceWithStreamingResponse, + AsyncOauth2ResourceWithStreamingResponse, +) from .openai.openai import ( OpenAIResource, AsyncOpenAIResource, @@ -71,6 +79,10 @@ def anthropic(self) -> AnthropicResource: def openai(self) -> OpenAIResource: return OpenAIResource(self._client) + @cached_property + def oauth2(self) -> Oauth2Resource: + return Oauth2Resource(self._client) + @cached_property def with_raw_response(self) -> EvaluationMetricsResourceWithRawResponse: """ @@ -180,6 +192,10 @@ def anthropic(self) -> AsyncAnthropicResource: def openai(self) -> AsyncOpenAIResource: return AsyncOpenAIResource(self._client) + @cached_property + def oauth2(self) -> AsyncOauth2Resource: + return AsyncOauth2Resource(self._client) + @cached_property def with_raw_response(self) -> AsyncEvaluationMetricsResourceWithRawResponse: """ @@ -299,6 +315,10 @@ def anthropic(self) -> AnthropicResourceWithRawResponse: def openai(self) -> OpenAIResourceWithRawResponse: return OpenAIResourceWithRawResponse(self._evaluation_metrics.openai) + @cached_property + def oauth2(self) -> Oauth2ResourceWithRawResponse: + return Oauth2ResourceWithRawResponse(self._evaluation_metrics.oauth2) + class AsyncEvaluationMetricsResourceWithRawResponse: def __init__(self, evaluation_metrics: AsyncEvaluationMetricsResource) -> None: @@ -327,6 +347,10 @@ def anthropic(self) -> AsyncAnthropicResourceWithRawResponse: def openai(self) -> AsyncOpenAIResourceWithRawResponse: return AsyncOpenAIResourceWithRawResponse(self._evaluation_metrics.openai) + @cached_property + def oauth2(self) -> AsyncOauth2ResourceWithRawResponse: + return AsyncOauth2ResourceWithRawResponse(self._evaluation_metrics.oauth2) + class EvaluationMetricsResourceWithStreamingResponse: def __init__(self, evaluation_metrics: EvaluationMetricsResource) -> None: @@ -355,6 +379,10 @@ def anthropic(self) -> AnthropicResourceWithStreamingResponse: def openai(self) -> OpenAIResourceWithStreamingResponse: return OpenAIResourceWithStreamingResponse(self._evaluation_metrics.openai) + @cached_property + def oauth2(self) -> Oauth2ResourceWithStreamingResponse: + return Oauth2ResourceWithStreamingResponse(self._evaluation_metrics.oauth2) + class AsyncEvaluationMetricsResourceWithStreamingResponse: def __init__(self, evaluation_metrics: AsyncEvaluationMetricsResource) -> None: @@ -382,3 +410,7 @@ def anthropic(self) -> AsyncAnthropicResourceWithStreamingResponse: @cached_property def openai(self) -> AsyncOpenAIResourceWithStreamingResponse: return AsyncOpenAIResourceWithStreamingResponse(self._evaluation_metrics.openai) + + @cached_property + def oauth2(self) -> AsyncOauth2ResourceWithStreamingResponse: + return AsyncOauth2ResourceWithStreamingResponse(self._evaluation_metrics.oauth2) diff --git a/src/gradient/resources/agents/evaluation_metrics/oauth2/__init__.py b/src/gradient/resources/agents/evaluation_metrics/oauth2/__init__.py new file mode 100644 index 00000000..c74ddfe8 --- /dev/null +++ b/src/gradient/resources/agents/evaluation_metrics/oauth2/__init__.py @@ -0,0 +1,33 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from .oauth2 import ( + Oauth2Resource, + AsyncOauth2Resource, + Oauth2ResourceWithRawResponse, + AsyncOauth2ResourceWithRawResponse, + Oauth2ResourceWithStreamingResponse, + AsyncOauth2ResourceWithStreamingResponse, +) +from .dropbox import ( + DropboxResource, + AsyncDropboxResource, + DropboxResourceWithRawResponse, + AsyncDropboxResourceWithRawResponse, + DropboxResourceWithStreamingResponse, + AsyncDropboxResourceWithStreamingResponse, +) + +__all__ = [ + "DropboxResource", + "AsyncDropboxResource", + "DropboxResourceWithRawResponse", + "AsyncDropboxResourceWithRawResponse", + "DropboxResourceWithStreamingResponse", + "AsyncDropboxResourceWithStreamingResponse", + "Oauth2Resource", + "AsyncOauth2Resource", + "Oauth2ResourceWithRawResponse", + "AsyncOauth2ResourceWithRawResponse", + "Oauth2ResourceWithStreamingResponse", + "AsyncOauth2ResourceWithStreamingResponse", +] diff --git a/src/gradient/resources/agents/evaluation_metrics/oauth2/dropbox.py b/src/gradient/resources/agents/evaluation_metrics/oauth2/dropbox.py new file mode 100644 index 00000000..caa7d0d3 --- /dev/null +++ b/src/gradient/resources/agents/evaluation_metrics/oauth2/dropbox.py @@ -0,0 +1,193 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import httpx + +from ....._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ....._utils import maybe_transform, async_maybe_transform +from ....._compat import cached_property +from ....._resource import SyncAPIResource, AsyncAPIResource +from ....._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ....._base_client import make_request_options +from .....types.agents.evaluation_metrics.oauth2 import dropbox_create_tokens_params +from .....types.agents.evaluation_metrics.oauth2.dropbox_create_tokens_response import DropboxCreateTokensResponse + +__all__ = ["DropboxResource", "AsyncDropboxResource"] + + +class DropboxResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> DropboxResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers + """ + return DropboxResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> DropboxResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response + """ + return DropboxResourceWithStreamingResponse(self) + + def create_tokens( + self, + *, + code: str | NotGiven = NOT_GIVEN, + redirect_url: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> DropboxCreateTokensResponse: + """ + To obtain the refresh token, needed for creation of data sources, send a GET + request to `/v2/gen-ai/oauth2/dropbox/tokens`. 
Pass the code you obtrained from + the oauth flow in the field 'code' + + Args: + code: The oauth2 code from google + + redirect_url: Redirect url + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/v2/gen-ai/oauth2/dropbox/tokens" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/oauth2/dropbox/tokens", + body=maybe_transform( + { + "code": code, + "redirect_url": redirect_url, + }, + dropbox_create_tokens_params.DropboxCreateTokensParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=DropboxCreateTokensResponse, + ) + + +class AsyncDropboxResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncDropboxResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers + """ + return AsyncDropboxResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncDropboxResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response + """ + return AsyncDropboxResourceWithStreamingResponse(self) + + async def create_tokens( + self, + *, + code: str | NotGiven = NOT_GIVEN, + redirect_url: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> DropboxCreateTokensResponse: + """ + To obtain the refresh token, needed for creation of data sources, send a GET + request to `/v2/gen-ai/oauth2/dropbox/tokens`. 
Pass the code you obtrained from + the oauth flow in the field 'code' + + Args: + code: The oauth2 code from google + + redirect_url: Redirect url + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/v2/gen-ai/oauth2/dropbox/tokens" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/oauth2/dropbox/tokens", + body=await async_maybe_transform( + { + "code": code, + "redirect_url": redirect_url, + }, + dropbox_create_tokens_params.DropboxCreateTokensParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=DropboxCreateTokensResponse, + ) + + +class DropboxResourceWithRawResponse: + def __init__(self, dropbox: DropboxResource) -> None: + self._dropbox = dropbox + + self.create_tokens = to_raw_response_wrapper( + dropbox.create_tokens, + ) + + +class AsyncDropboxResourceWithRawResponse: + def __init__(self, dropbox: AsyncDropboxResource) -> None: + self._dropbox = dropbox + + self.create_tokens = async_to_raw_response_wrapper( + dropbox.create_tokens, + ) + + +class DropboxResourceWithStreamingResponse: + def __init__(self, dropbox: DropboxResource) -> None: + self._dropbox = dropbox + + self.create_tokens = to_streamed_response_wrapper( + dropbox.create_tokens, + ) + + +class AsyncDropboxResourceWithStreamingResponse: + def __init__(self, dropbox: AsyncDropboxResource) -> None: + self._dropbox = dropbox + + self.create_tokens = async_to_streamed_response_wrapper( + dropbox.create_tokens, + ) diff --git a/src/gradient/resources/agents/evaluation_metrics/oauth2/oauth2.py b/src/gradient/resources/agents/evaluation_metrics/oauth2/oauth2.py new file mode 100644 index 00000000..8063ce5a --- /dev/null +++ b/src/gradient/resources/agents/evaluation_metrics/oauth2/oauth2.py @@ -0,0 +1,229 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import httpx + +from .dropbox import ( + DropboxResource, + AsyncDropboxResource, + DropboxResourceWithRawResponse, + AsyncDropboxResourceWithRawResponse, + DropboxResourceWithStreamingResponse, + AsyncDropboxResourceWithStreamingResponse, +) +from ....._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ....._utils import maybe_transform, async_maybe_transform +from ....._compat import cached_property +from ....._resource import SyncAPIResource, AsyncAPIResource +from ....._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ....._base_client import make_request_options +from .....types.agents.evaluation_metrics import oauth2_generate_url_params +from .....types.agents.evaluation_metrics.oauth2_generate_url_response import Oauth2GenerateURLResponse + +__all__ = ["Oauth2Resource", "AsyncOauth2Resource"] + + +class Oauth2Resource(SyncAPIResource): + @cached_property + def dropbox(self) -> DropboxResource: + return DropboxResource(self._client) + + @cached_property + def with_raw_response(self) -> Oauth2ResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. 
+ + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers + """ + return Oauth2ResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> Oauth2ResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response + """ + return Oauth2ResourceWithStreamingResponse(self) + + def generate_url( + self, + *, + redirect_url: str | NotGiven = NOT_GIVEN, + type: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Oauth2GenerateURLResponse: + """ + To generate an Oauth2-URL for use with your localhost, send a GET request to + `/v2/gen-ai/oauth2/url`. Pass 'http://localhost:3000 as redirect_url + + Args: + redirect_url: The redirect url. + + type: Type "google" / "dropbox". + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get( + "/v2/gen-ai/oauth2/url" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/oauth2/url", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "redirect_url": redirect_url, + "type": type, + }, + oauth2_generate_url_params.Oauth2GenerateURLParams, + ), + ), + cast_to=Oauth2GenerateURLResponse, + ) + + +class AsyncOauth2Resource(AsyncAPIResource): + @cached_property + def dropbox(self) -> AsyncDropboxResource: + return AsyncDropboxResource(self._client) + + @cached_property + def with_raw_response(self) -> AsyncOauth2ResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers + """ + return AsyncOauth2ResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncOauth2ResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response + """ + return AsyncOauth2ResourceWithStreamingResponse(self) + + async def generate_url( + self, + *, + redirect_url: str | NotGiven = NOT_GIVEN, + type: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Oauth2GenerateURLResponse: + """ + To generate an Oauth2-URL for use with your localhost, send a GET request to + `/v2/gen-ai/oauth2/url`. Pass 'http://localhost:3000 as redirect_url + + Args: + redirect_url: The redirect url. + + type: Type "google" / "dropbox". + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._get( + "/v2/gen-ai/oauth2/url" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/oauth2/url", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "redirect_url": redirect_url, + "type": type, + }, + oauth2_generate_url_params.Oauth2GenerateURLParams, + ), + ), + cast_to=Oauth2GenerateURLResponse, + ) + + +class Oauth2ResourceWithRawResponse: + def __init__(self, oauth2: Oauth2Resource) -> None: + self._oauth2 = oauth2 + + self.generate_url = to_raw_response_wrapper( + oauth2.generate_url, + ) + + @cached_property + def dropbox(self) -> DropboxResourceWithRawResponse: + return DropboxResourceWithRawResponse(self._oauth2.dropbox) + + +class AsyncOauth2ResourceWithRawResponse: + def __init__(self, oauth2: AsyncOauth2Resource) -> None: + self._oauth2 = oauth2 + + self.generate_url = async_to_raw_response_wrapper( + oauth2.generate_url, + ) + + @cached_property + def dropbox(self) -> AsyncDropboxResourceWithRawResponse: + return AsyncDropboxResourceWithRawResponse(self._oauth2.dropbox) + + +class Oauth2ResourceWithStreamingResponse: + def __init__(self, oauth2: Oauth2Resource) -> None: + self._oauth2 = oauth2 + + self.generate_url = to_streamed_response_wrapper( + oauth2.generate_url, + ) + + @cached_property + def dropbox(self) -> DropboxResourceWithStreamingResponse: + return DropboxResourceWithStreamingResponse(self._oauth2.dropbox) + + +class AsyncOauth2ResourceWithStreamingResponse: + def __init__(self, oauth2: AsyncOauth2Resource) -> None: + self._oauth2 = oauth2 + + self.generate_url = async_to_streamed_response_wrapper( + oauth2.generate_url, + ) + + @cached_property + def dropbox(self) -> AsyncDropboxResourceWithStreamingResponse: + return AsyncDropboxResourceWithStreamingResponse(self._oauth2.dropbox) diff --git a/src/gradient/types/__init__.py b/src/gradient/types/__init__.py index d3d7dab0..d28c4c1d 100644 --- a/src/gradient/types/__init__.py +++ b/src/gradient/types/__init__.py @@ -79,6 +79,7 @@ from .knowledge_base_list_params import ( KnowledgeBaseListParams as KnowledgeBaseListParams, ) +from .agent_retrieve_usage_params import AgentRetrieveUsageParams as AgentRetrieveUsageParams from .droplet_backup_policy_param import ( DropletBackupPolicyParam as DropletBackupPolicyParam, ) @@ -97,6 +98,7 @@ from .knowledge_base_update_params import ( KnowledgeBaseUpdateParams as KnowledgeBaseUpdateParams, ) +from .agent_retrieve_usage_response import AgentRetrieveUsageResponse as AgentRetrieveUsageResponse from .gpu_droplet_retrieve_response import ( GPUDropletRetrieveResponse as GPUDropletRetrieveResponse, ) diff --git a/src/gradient/types/agent_retrieve_usage_params.py b/src/gradient/types/agent_retrieve_usage_params.py new 
file mode 100644 index 00000000..f5471151 --- /dev/null +++ b/src/gradient/types/agent_retrieve_usage_params.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["AgentRetrieveUsageParams"] + + +class AgentRetrieveUsageParams(TypedDict, total=False): + start: str + """Return all usage data from this date.""" + + stop: str + """ + Return all usage data up to this date, if omitted, will return up to the current + date. + """ diff --git a/src/gradient/types/agent_retrieve_usage_response.py b/src/gradient/types/agent_retrieve_usage_response.py new file mode 100644 index 00000000..1d65addd --- /dev/null +++ b/src/gradient/types/agent_retrieve_usage_response.py @@ -0,0 +1,48 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional +from datetime import datetime + +from .._models import BaseModel + +__all__ = ["AgentRetrieveUsageResponse", "LogInsightsUsage", "LogInsightsUsageMeasurement", "Usage", "UsageMeasurement"] + + +class LogInsightsUsageMeasurement(BaseModel): + tokens: Optional[int] = None + + usage_type: Optional[str] = None + + +class LogInsightsUsage(BaseModel): + measurements: Optional[List[LogInsightsUsageMeasurement]] = None + + resource_uuid: Optional[str] = None + + start: Optional[datetime] = None + + stop: Optional[datetime] = None + + +class UsageMeasurement(BaseModel): + tokens: Optional[int] = None + + usage_type: Optional[str] = None + + +class Usage(BaseModel): + measurements: Optional[List[UsageMeasurement]] = None + + resource_uuid: Optional[str] = None + + start: Optional[datetime] = None + + stop: Optional[datetime] = None + + +class AgentRetrieveUsageResponse(BaseModel): + log_insights_usage: Optional[LogInsightsUsage] = None + """Resource Usage Description""" + + usage: Optional[Usage] = None + """Resource Usage Description""" diff --git a/src/gradient/types/agents/evaluation_metrics/__init__.py b/src/gradient/types/agents/evaluation_metrics/__init__.py index 12ca91f3..01ce5ed2 100644 --- a/src/gradient/types/agents/evaluation_metrics/__init__.py +++ b/src/gradient/types/agents/evaluation_metrics/__init__.py @@ -15,7 +15,9 @@ from .workspace_create_response import WorkspaceCreateResponse as WorkspaceCreateResponse from .workspace_delete_response import WorkspaceDeleteResponse as WorkspaceDeleteResponse from .workspace_update_response import WorkspaceUpdateResponse as WorkspaceUpdateResponse +from .oauth2_generate_url_params import Oauth2GenerateURLParams as Oauth2GenerateURLParams from .workspace_retrieve_response import WorkspaceRetrieveResponse as WorkspaceRetrieveResponse +from .oauth2_generate_url_response import Oauth2GenerateURLResponse as Oauth2GenerateURLResponse from .workspace_list_evaluation_test_cases_response import ( WorkspaceListEvaluationTestCasesResponse as WorkspaceListEvaluationTestCasesResponse, ) diff --git a/src/gradient/types/agents/evaluation_metrics/oauth2/__init__.py b/src/gradient/types/agents/evaluation_metrics/oauth2/__init__.py new file mode 100644 index 00000000..e686ce35 --- /dev/null +++ b/src/gradient/types/agents/evaluation_metrics/oauth2/__init__.py @@ -0,0 +1,6 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from .dropbox_create_tokens_params import DropboxCreateTokensParams as DropboxCreateTokensParams +from .dropbox_create_tokens_response import DropboxCreateTokensResponse as DropboxCreateTokensResponse diff --git a/src/gradient/types/agents/evaluation_metrics/oauth2/dropbox_create_tokens_params.py b/src/gradient/types/agents/evaluation_metrics/oauth2/dropbox_create_tokens_params.py new file mode 100644 index 00000000..00d22cce --- /dev/null +++ b/src/gradient/types/agents/evaluation_metrics/oauth2/dropbox_create_tokens_params.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["DropboxCreateTokensParams"] + + +class DropboxCreateTokensParams(TypedDict, total=False): + code: str + """The oauth2 code from google""" + + redirect_url: str + """Redirect url""" diff --git a/src/gradient/types/agents/evaluation_metrics/oauth2/dropbox_create_tokens_response.py b/src/gradient/types/agents/evaluation_metrics/oauth2/dropbox_create_tokens_response.py new file mode 100644 index 00000000..816b89f4 --- /dev/null +++ b/src/gradient/types/agents/evaluation_metrics/oauth2/dropbox_create_tokens_response.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from ....._models import BaseModel + +__all__ = ["DropboxCreateTokensResponse"] + + +class DropboxCreateTokensResponse(BaseModel): + token: Optional[str] = None + """The access token""" + + refresh_token: Optional[str] = None + """The refresh token""" diff --git a/src/gradient/types/agents/evaluation_metrics/oauth2_generate_url_params.py b/src/gradient/types/agents/evaluation_metrics/oauth2_generate_url_params.py new file mode 100644 index 00000000..68924774 --- /dev/null +++ b/src/gradient/types/agents/evaluation_metrics/oauth2_generate_url_params.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["Oauth2GenerateURLParams"] + + +class Oauth2GenerateURLParams(TypedDict, total=False): + redirect_url: str + """The redirect url.""" + + type: str + """Type "google" / "dropbox".""" diff --git a/src/gradient/types/agents/evaluation_metrics/oauth2_generate_url_response.py b/src/gradient/types/agents/evaluation_metrics/oauth2_generate_url_response.py new file mode 100644 index 00000000..8be21b8a --- /dev/null +++ b/src/gradient/types/agents/evaluation_metrics/oauth2_generate_url_response.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from ...._models import BaseModel + +__all__ = ["Oauth2GenerateURLResponse"] + + +class Oauth2GenerateURLResponse(BaseModel): + url: Optional[str] = None + """The oauth2 url""" diff --git a/tests/api_resources/agents/evaluation_metrics/oauth2/__init__.py b/tests/api_resources/agents/evaluation_metrics/oauth2/__init__.py new file mode 100644 index 00000000..fd8019a9 --- /dev/null +++ b/tests/api_resources/agents/evaluation_metrics/oauth2/__init__.py @@ -0,0 +1 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
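As a usage sketch of the OAuth2 surface added by this patch, the snippet below strings together the generated resources and models from the diffs above. It assumes a `Gradient` client with credentials already configured and uses placeholder values for the redirect URL and the OAuth2 code; it is illustrative only and not part of the generated code.

# Sketch: generate an OAuth2 URL, then exchange the provider's code for Dropbox tokens.
from gradient import Gradient

client = Gradient()  # assumes credentials are configured (e.g. via the environment)

# GET /v2/gen-ai/oauth2/url -> Oauth2GenerateURLResponse
url_resp = client.agents.evaluation_metrics.oauth2.generate_url(
    redirect_url="http://localhost:3000",  # placeholder redirect URL
    type="dropbox",                        # "google" / "dropbox", per the params docstring
)
print(url_resp.url)

# Exchange the code returned to the redirect URL -> DropboxCreateTokensResponse
tokens = client.agents.evaluation_metrics.oauth2.dropbox.create_tokens(
    code="example-code",                   # placeholder OAuth2 code from the provider
    redirect_url="http://localhost:3000",
)
print(tokens.token, tokens.refresh_token)
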
diff --git a/tests/api_resources/agents/evaluation_metrics/oauth2/test_dropbox.py b/tests/api_resources/agents/evaluation_metrics/oauth2/test_dropbox.py new file mode 100644 index 00000000..417bb3b1 --- /dev/null +++ b/tests/api_resources/agents/evaluation_metrics/oauth2/test_dropbox.py @@ -0,0 +1,100 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from gradient import Gradient, AsyncGradient +from tests.utils import assert_matches_type +from gradient.types.agents.evaluation_metrics.oauth2 import DropboxCreateTokensResponse + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestDropbox: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create_tokens(self, client: Gradient) -> None: + dropbox = client.agents.evaluation_metrics.oauth2.dropbox.create_tokens() + assert_matches_type(DropboxCreateTokensResponse, dropbox, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_create_tokens_with_all_params(self, client: Gradient) -> None: + dropbox = client.agents.evaluation_metrics.oauth2.dropbox.create_tokens( + code="example string", + redirect_url="example string", + ) + assert_matches_type(DropboxCreateTokensResponse, dropbox, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_create_tokens(self, client: Gradient) -> None: + response = client.agents.evaluation_metrics.oauth2.dropbox.with_raw_response.create_tokens() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + dropbox = response.parse() + assert_matches_type(DropboxCreateTokensResponse, dropbox, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_create_tokens(self, client: Gradient) -> None: + with client.agents.evaluation_metrics.oauth2.dropbox.with_streaming_response.create_tokens() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + dropbox = response.parse() + assert_matches_type(DropboxCreateTokensResponse, dropbox, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncDropbox: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create_tokens(self, async_client: AsyncGradient) -> None: + dropbox = await async_client.agents.evaluation_metrics.oauth2.dropbox.create_tokens() + assert_matches_type(DropboxCreateTokensResponse, dropbox, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_create_tokens_with_all_params(self, async_client: AsyncGradient) -> None: + dropbox = await async_client.agents.evaluation_metrics.oauth2.dropbox.create_tokens( + code="example string", + redirect_url="example string", + ) + assert_matches_type(DropboxCreateTokensResponse, dropbox, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_create_tokens(self, 
async_client: AsyncGradient) -> None: + response = await async_client.agents.evaluation_metrics.oauth2.dropbox.with_raw_response.create_tokens() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + dropbox = await response.parse() + assert_matches_type(DropboxCreateTokensResponse, dropbox, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_create_tokens(self, async_client: AsyncGradient) -> None: + async with ( + async_client.agents.evaluation_metrics.oauth2.dropbox.with_streaming_response.create_tokens() + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + dropbox = await response.parse() + assert_matches_type(DropboxCreateTokensResponse, dropbox, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/agents/evaluation_metrics/test_oauth2.py b/tests/api_resources/agents/evaluation_metrics/test_oauth2.py new file mode 100644 index 00000000..f247d94f --- /dev/null +++ b/tests/api_resources/agents/evaluation_metrics/test_oauth2.py @@ -0,0 +1,98 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from gradient import Gradient, AsyncGradient +from tests.utils import assert_matches_type +from gradient.types.agents.evaluation_metrics import Oauth2GenerateURLResponse + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestOauth2: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_generate_url(self, client: Gradient) -> None: + oauth2 = client.agents.evaluation_metrics.oauth2.generate_url() + assert_matches_type(Oauth2GenerateURLResponse, oauth2, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_generate_url_with_all_params(self, client: Gradient) -> None: + oauth2 = client.agents.evaluation_metrics.oauth2.generate_url( + redirect_url="redirect_url", + type="type", + ) + assert_matches_type(Oauth2GenerateURLResponse, oauth2, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_generate_url(self, client: Gradient) -> None: + response = client.agents.evaluation_metrics.oauth2.with_raw_response.generate_url() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + oauth2 = response.parse() + assert_matches_type(Oauth2GenerateURLResponse, oauth2, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_generate_url(self, client: Gradient) -> None: + with client.agents.evaluation_metrics.oauth2.with_streaming_response.generate_url() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + oauth2 = response.parse() + assert_matches_type(Oauth2GenerateURLResponse, oauth2, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncOauth2: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + 
@pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_generate_url(self, async_client: AsyncGradient) -> None: + oauth2 = await async_client.agents.evaluation_metrics.oauth2.generate_url() + assert_matches_type(Oauth2GenerateURLResponse, oauth2, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_generate_url_with_all_params(self, async_client: AsyncGradient) -> None: + oauth2 = await async_client.agents.evaluation_metrics.oauth2.generate_url( + redirect_url="redirect_url", + type="type", + ) + assert_matches_type(Oauth2GenerateURLResponse, oauth2, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_generate_url(self, async_client: AsyncGradient) -> None: + response = await async_client.agents.evaluation_metrics.oauth2.with_raw_response.generate_url() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + oauth2 = await response.parse() + assert_matches_type(Oauth2GenerateURLResponse, oauth2, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_generate_url(self, async_client: AsyncGradient) -> None: + async with async_client.agents.evaluation_metrics.oauth2.with_streaming_response.generate_url() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + oauth2 = await response.parse() + assert_matches_type(Oauth2GenerateURLResponse, oauth2, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/test_agents.py b/tests/api_resources/test_agents.py index dd4dbdc4..6d040d18 100644 --- a/tests/api_resources/test_agents.py +++ b/tests/api_resources/test_agents.py @@ -16,6 +16,7 @@ AgentUpdateResponse, AgentRetrieveResponse, AgentUpdateStatusResponse, + AgentRetrieveUsageResponse, ) base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -258,6 +259,58 @@ def test_path_params_delete(self, client: Gradient) -> None: "", ) + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve_usage(self, client: Gradient) -> None: + agent = client.agents.retrieve_usage( + uuid='"123e4567-e89b-12d3-a456-426614174000"', + ) + assert_matches_type(AgentRetrieveUsageResponse, agent, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_method_retrieve_usage_with_all_params(self, client: Gradient) -> None: + agent = client.agents.retrieve_usage( + uuid='"123e4567-e89b-12d3-a456-426614174000"', + start="start", + stop="stop", + ) + assert_matches_type(AgentRetrieveUsageResponse, agent, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_raw_response_retrieve_usage(self, client: Gradient) -> None: + response = client.agents.with_raw_response.retrieve_usage( + uuid='"123e4567-e89b-12d3-a456-426614174000"', + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + agent = response.parse() + assert_matches_type(AgentRetrieveUsageResponse, agent, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_streaming_response_retrieve_usage(self, client: Gradient) -> None: + with client.agents.with_streaming_response.retrieve_usage( + 
uuid='"123e4567-e89b-12d3-a456-426614174000"', + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + agent = response.parse() + assert_matches_type(AgentRetrieveUsageResponse, agent, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + def test_path_params_retrieve_usage(self, client: Gradient) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): + client.agents.with_raw_response.retrieve_usage( + uuid="", + ) + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_status(self, client: Gradient) -> None: @@ -550,6 +603,58 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None: "", ) + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve_usage(self, async_client: AsyncGradient) -> None: + agent = await async_client.agents.retrieve_usage( + uuid='"123e4567-e89b-12d3-a456-426614174000"', + ) + assert_matches_type(AgentRetrieveUsageResponse, agent, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_method_retrieve_usage_with_all_params(self, async_client: AsyncGradient) -> None: + agent = await async_client.agents.retrieve_usage( + uuid='"123e4567-e89b-12d3-a456-426614174000"', + start="start", + stop="stop", + ) + assert_matches_type(AgentRetrieveUsageResponse, agent, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_raw_response_retrieve_usage(self, async_client: AsyncGradient) -> None: + response = await async_client.agents.with_raw_response.retrieve_usage( + uuid='"123e4567-e89b-12d3-a456-426614174000"', + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + agent = await response.parse() + assert_matches_type(AgentRetrieveUsageResponse, agent, path=["response"]) + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_streaming_response_retrieve_usage(self, async_client: AsyncGradient) -> None: + async with async_client.agents.with_streaming_response.retrieve_usage( + uuid='"123e4567-e89b-12d3-a456-426614174000"', + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + agent = await response.parse() + assert_matches_type(AgentRetrieveUsageResponse, agent, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip(reason="Prism tests are disabled") + @parametrize + async def test_path_params_retrieve_usage(self, async_client: AsyncGradient) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): + await async_client.agents.with_raw_response.retrieve_usage( + uuid="", + ) + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_status(self, async_client: AsyncGradient) -> None: From 55255fb5d51bca4204f5e741024f4184da465d78 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 17 Sep 2025 03:17:20 +0000 Subject: [PATCH 09/11] chore(internal): update pydantic dependency --- requirements-dev.lock | 7 +++++-- requirements.lock | 7 +++++-- src/gradient/_models.py | 14 ++++++++++---- 3 files changed, 20 
insertions(+), 8 deletions(-) diff --git a/requirements-dev.lock b/requirements-dev.lock index af44e06b..896c8c3a 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -88,9 +88,9 @@ pluggy==1.5.0 propcache==0.3.1 # via aiohttp # via yarl -pydantic==2.10.3 +pydantic==2.11.9 # via gradient -pydantic-core==2.27.1 +pydantic-core==2.33.2 # via pydantic pygments==2.18.0 # via rich @@ -126,6 +126,9 @@ typing-extensions==4.12.2 # via pydantic # via pydantic-core # via pyright + # via typing-inspection +typing-inspection==0.4.1 + # via pydantic virtualenv==20.24.5 # via nox yarl==1.20.0 diff --git a/requirements.lock b/requirements.lock index f9072669..1fce47a6 100644 --- a/requirements.lock +++ b/requirements.lock @@ -55,9 +55,9 @@ multidict==6.4.4 propcache==0.3.1 # via aiohttp # via yarl -pydantic==2.10.3 +pydantic==2.11.9 # via gradient -pydantic-core==2.27.1 +pydantic-core==2.33.2 # via pydantic sniffio==1.3.0 # via anyio @@ -68,5 +68,8 @@ typing-extensions==4.12.2 # via multidict # via pydantic # via pydantic-core + # via typing-inspection +typing-inspection==0.4.1 + # via pydantic yarl==1.20.0 # via aiohttp diff --git a/src/gradient/_models.py b/src/gradient/_models.py index 3a6017ef..6a3cd1d2 100644 --- a/src/gradient/_models.py +++ b/src/gradient/_models.py @@ -256,7 +256,7 @@ def model_dump( mode: Literal["json", "python"] | str = "python", include: IncEx | None = None, exclude: IncEx | None = None, - by_alias: bool = False, + by_alias: bool | None = None, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, @@ -264,6 +264,7 @@ def model_dump( warnings: bool | Literal["none", "warn", "error"] = True, context: dict[str, Any] | None = None, serialize_as_any: bool = False, + fallback: Callable[[Any], Any] | None = None, ) -> dict[str, Any]: """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump @@ -295,10 +296,12 @@ def model_dump( raise ValueError("context is only supported in Pydantic v2") if serialize_as_any != False: raise ValueError("serialize_as_any is only supported in Pydantic v2") + if fallback is not None: + raise ValueError("fallback is only supported in Pydantic v2") dumped = super().dict( # pyright: ignore[reportDeprecated] include=include, exclude=exclude, - by_alias=by_alias, + by_alias=by_alias if by_alias is not None else False, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, exclude_none=exclude_none, @@ -313,13 +316,14 @@ def model_dump_json( indent: int | None = None, include: IncEx | None = None, exclude: IncEx | None = None, - by_alias: bool = False, + by_alias: bool | None = None, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, round_trip: bool = False, warnings: bool | Literal["none", "warn", "error"] = True, context: dict[str, Any] | None = None, + fallback: Callable[[Any], Any] | None = None, serialize_as_any: bool = False, ) -> str: """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json @@ -348,11 +352,13 @@ def model_dump_json( raise ValueError("context is only supported in Pydantic v2") if serialize_as_any != False: raise ValueError("serialize_as_any is only supported in Pydantic v2") + if fallback is not None: + raise ValueError("fallback is only supported in Pydantic v2") return super().json( # type: ignore[reportDeprecated] indent=indent, include=include, exclude=exclude, - by_alias=by_alias, + by_alias=by_alias if by_alias is not None else False, exclude_unset=exclude_unset, 
exclude_defaults=exclude_defaults, exclude_none=exclude_none, From 7212f62b6d3a5bbc7c8422a7fd8f336d22792049 Mon Sep 17 00:00:00 2001 From: Ben Batha Date: Wed, 17 Sep 2025 12:35:08 -0400 Subject: [PATCH 10/11] chore: clean up LICENSING after legal review (#49) --- LICENSE | 2 +- README.md | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/LICENSE b/LICENSE index 656d8887..5ab1db50 100644 --- a/LICENSE +++ b/LICENSE @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2025 Gradient + Copyright 2025 DigitalOcean, LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/README.md b/README.md index b1e71b60..3e32f833 100644 --- a/README.md +++ b/README.md @@ -541,3 +541,8 @@ Python 3.8 or higher. ## Contributing See [the contributing documentation](./CONTRIBUTING.md). + + +## License + +Licensed under the Apache License 2.0. See [LICENSE](./LICENSE) \ No newline at end of file From 2fdd9db1f575730f6ab5f6a19adda98ca641e2ac Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 17 Sep 2025 16:35:32 +0000 Subject: [PATCH 11/11] release: 3.0.0-beta.6 --- .release-please-manifest.json | 2 +- CHANGELOG.md | 19 +++++++++++++++++++ pyproject.toml | 2 +- src/gradient/_version.py | 2 +- 4 files changed, 22 insertions(+), 3 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 9dcd5cc8..3c4dbee7 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.0.0-beta.5" + ".": "3.0.0-beta.6" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 85fdc0d1..9d1760d0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,24 @@ # Changelog +## 3.0.0-beta.6 (2025-09-17) + +Full Changelog: [v3.0.0-beta.5...v3.0.0-beta.6](https://github.com/digitalocean/gradient-python/compare/v3.0.0-beta.5...v3.0.0-beta.6) + +### Features + +* **api:** enable typescript ([c17086a](https://github.com/digitalocean/gradient-python/commit/c17086aaed18fbb8ba85f050556a193cdc4a233f)) +* improve future compat with pydantic v3 ([300eac0](https://github.com/digitalocean/gradient-python/commit/300eac0417f8f17a65bb871b15de1254f4677558)) +* normalize user agent with other do clients ([85bc8eb](https://github.com/digitalocean/gradient-python/commit/85bc8eb26afdfd7deb28ce2198eb3ef02181b95f)) +* **types:** replace List[str] with SequenceNotStr in params ([5a6aa92](https://github.com/digitalocean/gradient-python/commit/5a6aa9241b5e7c2f4319caa14d62f41c0c824f9e)) + + +### Chores + +* clean up LICENSING after legal review ([#49](https://github.com/digitalocean/gradient-python/issues/49)) ([7212f62](https://github.com/digitalocean/gradient-python/commit/7212f62b6d3a5bbc7c8422a7fd8f336d22792049)) +* **internal:** move mypy configurations to `pyproject.toml` file ([25c0448](https://github.com/digitalocean/gradient-python/commit/25c044818b636e3307af2fefd2add15a6e650e8d)) +* **internal:** update pydantic dependency ([55255fb](https://github.com/digitalocean/gradient-python/commit/55255fb5d51bca4204f5e741024f4184da465d78)) +* **tests:** simplify `get_platform` test ([b839e4b](https://github.com/digitalocean/gradient-python/commit/b839e4b31c1262157544bd69536051a10d6b098d)) + ## 3.0.0-beta.5 (2025-09-08) Full Changelog: 
[v3.0.0-beta.4...v3.0.0-beta.5](https://github.com/digitalocean/gradient-python/compare/v3.0.0-beta.4...v3.0.0-beta.5) diff --git a/pyproject.toml b/pyproject.toml index 5fc8df7f..d6232c97 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "gradient" -version = "3.0.0-beta.5" +version = "3.0.0-beta.6" description = "The official Python library for the Gradient API" dynamic = ["readme"] license = "Apache-2.0" diff --git a/src/gradient/_version.py b/src/gradient/_version.py index c7adeab4..81080cc3 100644 --- a/src/gradient/_version.py +++ b/src/gradient/_version.py @@ -1,4 +1,4 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. __title__ = "gradient" -__version__ = "3.0.0-beta.5" # x-release-please-version +__version__ = "3.0.0-beta.6" # x-release-please-version
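
For completeness, here is a short sketch of the agent usage endpoint introduced earlier in this series (see the `AgentRetrieveUsageParams`/`AgentRetrieveUsageResponse` types and the `retrieve_usage` tests). It assumes an existing agent UUID and a client with valid credentials; the date strings are placeholders, since the params type declares `start`/`stop` only as plain strings.

# Sketch: fetch usage measurements for a single agent.
from gradient import Gradient

client = Gradient()  # assumes credentials are configured

usage = client.agents.retrieve_usage(
    uuid="123e4567-e89b-12d3-a456-426614174000",  # placeholder agent UUID
    start="2025-09-01",  # assumed date format; the spec documents these only as strings
    stop="2025-09-17",
)

# AgentRetrieveUsageResponse carries optional `usage` and `log_insights_usage` blocks,
# each holding per-resource measurements with `tokens` and `usage_type`.
if usage.usage and usage.usage.measurements:
    for measurement in usage.usage.measurements:
        print(measurement.usage_type, measurement.tokens)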