diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 3b005e52..ee49ac2d 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "0.1.0-alpha.10"
+ ".": "0.1.0-alpha.11"
}
\ No newline at end of file
diff --git a/.stats.yml b/.stats.yml
index ce83998c..0a6b7a71 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
configured_endpoints: 76
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/digitalocean%2Fgradientai-e8b3cbc80e18e4f7f277010349f25e1319156704f359911dc464cc21a0d077a6.yml
openapi_spec_hash: c773d792724f5647ae25a5ae4ccec208
-config_hash: 9b44ce3fd39c43f2001bc11934e6b1b0
+config_hash: 1c936b3bd798c3fcb25479b19efa999a
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2969f29b..a47891f9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,19 @@
# Changelog
+## 0.1.0-alpha.11 (2025-06-28)
+
+Full Changelog: [v0.1.0-alpha.10...v0.1.0-alpha.11](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.10...v0.1.0-alpha.11)
+
+### Features
+
+* **api:** manual updates ([8d918dc](https://github.com/digitalocean/gradientai-python/commit/8d918dcc45f03d799b3aed4e94276086e2d7ea9b))
+
+
+### Chores
+
+* **ci:** only run for pushes and fork pull requests ([adfb5b5](https://github.com/digitalocean/gradientai-python/commit/adfb5b51149f667bf9a0b4b4c4c6418e91f843d8))
+* Move model providers ([8d918dc](https://github.com/digitalocean/gradientai-python/commit/8d918dcc45f03d799b3aed4e94276086e2d7ea9b))
+
## 0.1.0-alpha.10 (2025-06-28)
Full Changelog: [v0.1.0-alpha.9...v0.1.0-alpha.10](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.9...v0.1.0-alpha.10)
diff --git a/api.md b/api.md
index fc8d20eb..65699eaa 100644
--- a/api.md
+++ b/api.md
@@ -267,60 +267,6 @@ Methods:
- client.chat.completions.create(\*\*params) -> CompletionCreateResponse
-# ModelProviders
-
-## Anthropic
-
-### Keys
-
-Types:
-
-```python
-from gradientai.types.model_providers.anthropic import (
- KeyCreateResponse,
- KeyRetrieveResponse,
- KeyUpdateResponse,
- KeyListResponse,
- KeyDeleteResponse,
- KeyListAgentsResponse,
-)
-```
-
-Methods:
-
-- client.model_providers.anthropic.keys.create(\*\*params) -> KeyCreateResponse
-- client.model_providers.anthropic.keys.retrieve(api_key_uuid) -> KeyRetrieveResponse
-- client.model_providers.anthropic.keys.update(path_api_key_uuid, \*\*params) -> KeyUpdateResponse
-- client.model_providers.anthropic.keys.list(\*\*params) -> KeyListResponse
-- client.model_providers.anthropic.keys.delete(api_key_uuid) -> KeyDeleteResponse
-- client.model_providers.anthropic.keys.list_agents(uuid, \*\*params) -> KeyListAgentsResponse
-
-## OpenAI
-
-### Keys
-
-Types:
-
-```python
-from gradientai.types.model_providers.openai import (
- KeyCreateResponse,
- KeyRetrieveResponse,
- KeyUpdateResponse,
- KeyListResponse,
- KeyDeleteResponse,
- KeyRetrieveAgentsResponse,
-)
-```
-
-Methods:
-
-- client.model_providers.openai.keys.create(\*\*params) -> KeyCreateResponse
-- client.model_providers.openai.keys.retrieve(api_key_uuid) -> KeyRetrieveResponse
-- client.model_providers.openai.keys.update(path_api_key_uuid, \*\*params) -> KeyUpdateResponse
-- client.model_providers.openai.keys.list(\*\*params) -> KeyListResponse
-- client.model_providers.openai.keys.delete(api_key_uuid) -> KeyDeleteResponse
-- client.model_providers.openai.keys.retrieve_agents(uuid, \*\*params) -> KeyRetrieveAgentsResponse
-
# Regions
Types:
@@ -438,4 +384,54 @@ from gradientai.types import APIAgreement, APIModel, APIModelVersion, ModelListR
Methods:
-- client.models.list(\*\*params) -> ModelListResponse
+- client.models.list(\*\*params) -> ModelListResponse
+
+## Providers
+
+### Anthropic
+
+Types:
+
+```python
+from gradientai.types.models.providers import (
+ AnthropicCreateResponse,
+ AnthropicRetrieveResponse,
+ AnthropicUpdateResponse,
+ AnthropicListResponse,
+ AnthropicDeleteResponse,
+ AnthropicListAgentsResponse,
+)
+```
+
+Methods:
+
+- client.models.providers.anthropic.create(\*\*params) -> AnthropicCreateResponse
+- client.models.providers.anthropic.retrieve(api_key_uuid) -> AnthropicRetrieveResponse
+- client.models.providers.anthropic.update(path_api_key_uuid, \*\*params) -> AnthropicUpdateResponse
+- client.models.providers.anthropic.list(\*\*params) -> AnthropicListResponse
+- client.models.providers.anthropic.delete(api_key_uuid) -> AnthropicDeleteResponse
+- client.models.providers.anthropic.list_agents(uuid, \*\*params) -> AnthropicListAgentsResponse
+
+### OpenAI
+
+Types:
+
+```python
+from gradientai.types.models.providers import (
+ OpenAICreateResponse,
+ OpenAIRetrieveResponse,
+ OpenAIUpdateResponse,
+ OpenAIListResponse,
+ OpenAIDeleteResponse,
+ OpenAIRetrieveAgentsResponse,
+)
+```
+
+Methods:
+
+- client.models.providers.openai.create(\*\*params) -> OpenAICreateResponse
+- client.models.providers.openai.retrieve(api_key_uuid) -> OpenAIRetrieveResponse
+- client.models.providers.openai.update(path_api_key_uuid, \*\*params) -> OpenAIUpdateResponse
+- client.models.providers.openai.list(\*\*params) -> OpenAIListResponse
+- client.models.providers.openai.delete(api_key_uuid) -> OpenAIDeleteResponse
+- client.models.providers.openai.retrieve_agents(uuid, \*\*params) -> OpenAIRetrieveAgentsResponse
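The api.md changes above map the old `model_providers` surface onto `models.providers` with the same parameters. A minimal usage sketch of the renamed accessors; it assumes the `GradientAI` client class with environment-based credentials, and the key value and pagination arguments are illustrative placeholders, not taken from this diff:

```python
# Hedged sketch of the renamed API surface documented above.
# Assumptions: the client class is `GradientAI` and credentials are
# read from the environment; the api_key value is a placeholder.
from gradientai import GradientAI

client = GradientAI()

# Formerly client.model_providers.anthropic.keys.create(...)
created = client.models.providers.anthropic.create(
    api_key="sk-ant-placeholder",
    name="example-anthropic-key",
)

# Formerly client.model_providers.openai.keys.list(...)
keys = client.models.providers.openai.list(page=1, per_page=10)
```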
diff --git a/pyproject.toml b/pyproject.toml
index 1a2e8c01..e6e44fcb 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "c63a5cfe-b235-4fbe-8bbb-82a9e02a482a-python"
-version = "0.1.0-alpha.10"
+version = "0.1.0-alpha.11"
description = "The official Python library for GradientAI"
dynamic = ["readme"]
license = "Apache-2.0"
diff --git a/src/gradientai/_client.py b/src/gradientai/_client.py
index c9fe6733..2dc19e49 100644
--- a/src/gradientai/_client.py
+++ b/src/gradientai/_client.py
@@ -32,14 +32,13 @@
)
if TYPE_CHECKING:
- from .resources import chat, agents, models, regions, inference, knowledge_bases, model_providers
- from .resources.models import ModelsResource, AsyncModelsResource
+ from .resources import chat, agents, models, regions, inference, knowledge_bases
from .resources.regions import RegionsResource, AsyncRegionsResource
from .resources.chat.chat import ChatResource, AsyncChatResource
from .resources.agents.agents import AgentsResource, AsyncAgentsResource
+ from .resources.models.models import ModelsResource, AsyncModelsResource
from .resources.inference.inference import InferenceResource, AsyncInferenceResource
from .resources.knowledge_bases.knowledge_bases import KnowledgeBasesResource, AsyncKnowledgeBasesResource
- from .resources.model_providers.model_providers import ModelProvidersResource, AsyncModelProvidersResource
__all__ = [
"Timeout",
@@ -131,12 +130,6 @@ def chat(self) -> ChatResource:
return ChatResource(self)
- @cached_property
- def model_providers(self) -> ModelProvidersResource:
- from .resources.model_providers import ModelProvidersResource
-
- return ModelProvidersResource(self)
-
@cached_property
def regions(self) -> RegionsResource:
from .resources.regions import RegionsResource
@@ -371,12 +364,6 @@ def chat(self) -> AsyncChatResource:
return AsyncChatResource(self)
- @cached_property
- def model_providers(self) -> AsyncModelProvidersResource:
- from .resources.model_providers import AsyncModelProvidersResource
-
- return AsyncModelProvidersResource(self)
-
@cached_property
def regions(self) -> AsyncRegionsResource:
from .resources.regions import AsyncRegionsResource
@@ -551,12 +538,6 @@ def chat(self) -> chat.ChatResourceWithRawResponse:
return ChatResourceWithRawResponse(self._client.chat)
- @cached_property
- def model_providers(self) -> model_providers.ModelProvidersResourceWithRawResponse:
- from .resources.model_providers import ModelProvidersResourceWithRawResponse
-
- return ModelProvidersResourceWithRawResponse(self._client.model_providers)
-
@cached_property
def regions(self) -> regions.RegionsResourceWithRawResponse:
from .resources.regions import RegionsResourceWithRawResponse
@@ -600,12 +581,6 @@ def chat(self) -> chat.AsyncChatResourceWithRawResponse:
return AsyncChatResourceWithRawResponse(self._client.chat)
- @cached_property
- def model_providers(self) -> model_providers.AsyncModelProvidersResourceWithRawResponse:
- from .resources.model_providers import AsyncModelProvidersResourceWithRawResponse
-
- return AsyncModelProvidersResourceWithRawResponse(self._client.model_providers)
-
@cached_property
def regions(self) -> regions.AsyncRegionsResourceWithRawResponse:
from .resources.regions import AsyncRegionsResourceWithRawResponse
@@ -649,12 +624,6 @@ def chat(self) -> chat.ChatResourceWithStreamingResponse:
return ChatResourceWithStreamingResponse(self._client.chat)
- @cached_property
- def model_providers(self) -> model_providers.ModelProvidersResourceWithStreamingResponse:
- from .resources.model_providers import ModelProvidersResourceWithStreamingResponse
-
- return ModelProvidersResourceWithStreamingResponse(self._client.model_providers)
-
@cached_property
def regions(self) -> regions.RegionsResourceWithStreamingResponse:
from .resources.regions import RegionsResourceWithStreamingResponse
@@ -698,12 +667,6 @@ def chat(self) -> chat.AsyncChatResourceWithStreamingResponse:
return AsyncChatResourceWithStreamingResponse(self._client.chat)
- @cached_property
- def model_providers(self) -> model_providers.AsyncModelProvidersResourceWithStreamingResponse:
- from .resources.model_providers import AsyncModelProvidersResourceWithStreamingResponse
-
- return AsyncModelProvidersResourceWithStreamingResponse(self._client.model_providers)
-
@cached_property
def regions(self) -> regions.AsyncRegionsResourceWithStreamingResponse:
from .resources.regions import AsyncRegionsResourceWithStreamingResponse
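For callers, the net effect of these `_client.py` changes is that the `model_providers` accessor disappears and the same resources hang off `models`. A before/after sketch, with client construction assumed as above:

```python
from gradientai import GradientAI

client = GradientAI()  # construction details assumed

# Up to 0.1.0-alpha.10 (removed by this diff):
#   client.model_providers.anthropic.keys.list()
#   client.model_providers.openai.keys.list()

# From 0.1.0-alpha.11, the equivalent calls are:
anthropic_keys = client.models.providers.anthropic.list()
openai_keys = client.models.providers.openai.list()
```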
diff --git a/src/gradientai/_version.py b/src/gradientai/_version.py
index f1fdf3c0..5c407722 100644
--- a/src/gradientai/_version.py
+++ b/src/gradientai/_version.py
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
__title__ = "gradientai"
-__version__ = "0.1.0-alpha.10" # x-release-please-version
+__version__ = "0.1.0-alpha.11" # x-release-please-version
diff --git a/src/gradientai/resources/__init__.py b/src/gradientai/resources/__init__.py
index b56e7e4c..e1ed4a00 100644
--- a/src/gradientai/resources/__init__.py
+++ b/src/gradientai/resources/__init__.py
@@ -48,14 +48,6 @@
KnowledgeBasesResourceWithStreamingResponse,
AsyncKnowledgeBasesResourceWithStreamingResponse,
)
-from .model_providers import (
- ModelProvidersResource,
- AsyncModelProvidersResource,
- ModelProvidersResourceWithRawResponse,
- AsyncModelProvidersResourceWithRawResponse,
- ModelProvidersResourceWithStreamingResponse,
- AsyncModelProvidersResourceWithStreamingResponse,
-)
__all__ = [
"AgentsResource",
@@ -70,12 +62,6 @@
"AsyncChatResourceWithRawResponse",
"ChatResourceWithStreamingResponse",
"AsyncChatResourceWithStreamingResponse",
- "ModelProvidersResource",
- "AsyncModelProvidersResource",
- "ModelProvidersResourceWithRawResponse",
- "AsyncModelProvidersResourceWithRawResponse",
- "ModelProvidersResourceWithStreamingResponse",
- "AsyncModelProvidersResourceWithStreamingResponse",
"RegionsResource",
"AsyncRegionsResource",
"RegionsResourceWithRawResponse",
diff --git a/src/gradientai/resources/model_providers/anthropic/__init__.py b/src/gradientai/resources/model_providers/anthropic/__init__.py
deleted file mode 100644
index 057a3a2f..00000000
--- a/src/gradientai/resources/model_providers/anthropic/__init__.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from .keys import (
- KeysResource,
- AsyncKeysResource,
- KeysResourceWithRawResponse,
- AsyncKeysResourceWithRawResponse,
- KeysResourceWithStreamingResponse,
- AsyncKeysResourceWithStreamingResponse,
-)
-from .anthropic import (
- AnthropicResource,
- AsyncAnthropicResource,
- AnthropicResourceWithRawResponse,
- AsyncAnthropicResourceWithRawResponse,
- AnthropicResourceWithStreamingResponse,
- AsyncAnthropicResourceWithStreamingResponse,
-)
-
-__all__ = [
- "KeysResource",
- "AsyncKeysResource",
- "KeysResourceWithRawResponse",
- "AsyncKeysResourceWithRawResponse",
- "KeysResourceWithStreamingResponse",
- "AsyncKeysResourceWithStreamingResponse",
- "AnthropicResource",
- "AsyncAnthropicResource",
- "AnthropicResourceWithRawResponse",
- "AsyncAnthropicResourceWithRawResponse",
- "AnthropicResourceWithStreamingResponse",
- "AsyncAnthropicResourceWithStreamingResponse",
-]
diff --git a/src/gradientai/resources/model_providers/anthropic/anthropic.py b/src/gradientai/resources/model_providers/anthropic/anthropic.py
deleted file mode 100644
index 23a914e9..00000000
--- a/src/gradientai/resources/model_providers/anthropic/anthropic.py
+++ /dev/null
@@ -1,102 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from .keys import (
- KeysResource,
- AsyncKeysResource,
- KeysResourceWithRawResponse,
- AsyncKeysResourceWithRawResponse,
- KeysResourceWithStreamingResponse,
- AsyncKeysResourceWithStreamingResponse,
-)
-from ...._compat import cached_property
-from ...._resource import SyncAPIResource, AsyncAPIResource
-
-__all__ = ["AnthropicResource", "AsyncAnthropicResource"]
-
-
-class AnthropicResource(SyncAPIResource):
- @cached_property
- def keys(self) -> KeysResource:
- return KeysResource(self._client)
-
- @cached_property
- def with_raw_response(self) -> AnthropicResourceWithRawResponse:
- """
- This property can be used as a prefix for any HTTP method call to return
- the raw response object instead of the parsed content.
-
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers
- """
- return AnthropicResourceWithRawResponse(self)
-
- @cached_property
- def with_streaming_response(self) -> AnthropicResourceWithStreamingResponse:
- """
- An alternative to `.with_raw_response` that doesn't eagerly read the response body.
-
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response
- """
- return AnthropicResourceWithStreamingResponse(self)
-
-
-class AsyncAnthropicResource(AsyncAPIResource):
- @cached_property
- def keys(self) -> AsyncKeysResource:
- return AsyncKeysResource(self._client)
-
- @cached_property
- def with_raw_response(self) -> AsyncAnthropicResourceWithRawResponse:
- """
- This property can be used as a prefix for any HTTP method call to return
- the raw response object instead of the parsed content.
-
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers
- """
- return AsyncAnthropicResourceWithRawResponse(self)
-
- @cached_property
- def with_streaming_response(self) -> AsyncAnthropicResourceWithStreamingResponse:
- """
- An alternative to `.with_raw_response` that doesn't eagerly read the response body.
-
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response
- """
- return AsyncAnthropicResourceWithStreamingResponse(self)
-
-
-class AnthropicResourceWithRawResponse:
- def __init__(self, anthropic: AnthropicResource) -> None:
- self._anthropic = anthropic
-
- @cached_property
- def keys(self) -> KeysResourceWithRawResponse:
- return KeysResourceWithRawResponse(self._anthropic.keys)
-
-
-class AsyncAnthropicResourceWithRawResponse:
- def __init__(self, anthropic: AsyncAnthropicResource) -> None:
- self._anthropic = anthropic
-
- @cached_property
- def keys(self) -> AsyncKeysResourceWithRawResponse:
- return AsyncKeysResourceWithRawResponse(self._anthropic.keys)
-
-
-class AnthropicResourceWithStreamingResponse:
- def __init__(self, anthropic: AnthropicResource) -> None:
- self._anthropic = anthropic
-
- @cached_property
- def keys(self) -> KeysResourceWithStreamingResponse:
- return KeysResourceWithStreamingResponse(self._anthropic.keys)
-
-
-class AsyncAnthropicResourceWithStreamingResponse:
- def __init__(self, anthropic: AsyncAnthropicResource) -> None:
- self._anthropic = anthropic
-
- @cached_property
- def keys(self) -> AsyncKeysResourceWithStreamingResponse:
- return AsyncKeysResourceWithStreamingResponse(self._anthropic.keys)
diff --git a/src/gradientai/resources/model_providers/openai/__init__.py b/src/gradientai/resources/model_providers/openai/__init__.py
deleted file mode 100644
index 66d8ca7a..00000000
--- a/src/gradientai/resources/model_providers/openai/__init__.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from .keys import (
- KeysResource,
- AsyncKeysResource,
- KeysResourceWithRawResponse,
- AsyncKeysResourceWithRawResponse,
- KeysResourceWithStreamingResponse,
- AsyncKeysResourceWithStreamingResponse,
-)
-from .openai import (
- OpenAIResource,
- AsyncOpenAIResource,
- OpenAIResourceWithRawResponse,
- AsyncOpenAIResourceWithRawResponse,
- OpenAIResourceWithStreamingResponse,
- AsyncOpenAIResourceWithStreamingResponse,
-)
-
-__all__ = [
- "KeysResource",
- "AsyncKeysResource",
- "KeysResourceWithRawResponse",
- "AsyncKeysResourceWithRawResponse",
- "KeysResourceWithStreamingResponse",
- "AsyncKeysResourceWithStreamingResponse",
- "OpenAIResource",
- "AsyncOpenAIResource",
- "OpenAIResourceWithRawResponse",
- "AsyncOpenAIResourceWithRawResponse",
- "OpenAIResourceWithStreamingResponse",
- "AsyncOpenAIResourceWithStreamingResponse",
-]
diff --git a/src/gradientai/resources/model_providers/openai/openai.py b/src/gradientai/resources/model_providers/openai/openai.py
deleted file mode 100644
index b02dc2e1..00000000
--- a/src/gradientai/resources/model_providers/openai/openai.py
+++ /dev/null
@@ -1,102 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from .keys import (
- KeysResource,
- AsyncKeysResource,
- KeysResourceWithRawResponse,
- AsyncKeysResourceWithRawResponse,
- KeysResourceWithStreamingResponse,
- AsyncKeysResourceWithStreamingResponse,
-)
-from ...._compat import cached_property
-from ...._resource import SyncAPIResource, AsyncAPIResource
-
-__all__ = ["OpenAIResource", "AsyncOpenAIResource"]
-
-
-class OpenAIResource(SyncAPIResource):
- @cached_property
- def keys(self) -> KeysResource:
- return KeysResource(self._client)
-
- @cached_property
- def with_raw_response(self) -> OpenAIResourceWithRawResponse:
- """
- This property can be used as a prefix for any HTTP method call to return
- the raw response object instead of the parsed content.
-
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers
- """
- return OpenAIResourceWithRawResponse(self)
-
- @cached_property
- def with_streaming_response(self) -> OpenAIResourceWithStreamingResponse:
- """
- An alternative to `.with_raw_response` that doesn't eagerly read the response body.
-
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response
- """
- return OpenAIResourceWithStreamingResponse(self)
-
-
-class AsyncOpenAIResource(AsyncAPIResource):
- @cached_property
- def keys(self) -> AsyncKeysResource:
- return AsyncKeysResource(self._client)
-
- @cached_property
- def with_raw_response(self) -> AsyncOpenAIResourceWithRawResponse:
- """
- This property can be used as a prefix for any HTTP method call to return
- the raw response object instead of the parsed content.
-
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers
- """
- return AsyncOpenAIResourceWithRawResponse(self)
-
- @cached_property
- def with_streaming_response(self) -> AsyncOpenAIResourceWithStreamingResponse:
- """
- An alternative to `.with_raw_response` that doesn't eagerly read the response body.
-
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response
- """
- return AsyncOpenAIResourceWithStreamingResponse(self)
-
-
-class OpenAIResourceWithRawResponse:
- def __init__(self, openai: OpenAIResource) -> None:
- self._openai = openai
-
- @cached_property
- def keys(self) -> KeysResourceWithRawResponse:
- return KeysResourceWithRawResponse(self._openai.keys)
-
-
-class AsyncOpenAIResourceWithRawResponse:
- def __init__(self, openai: AsyncOpenAIResource) -> None:
- self._openai = openai
-
- @cached_property
- def keys(self) -> AsyncKeysResourceWithRawResponse:
- return AsyncKeysResourceWithRawResponse(self._openai.keys)
-
-
-class OpenAIResourceWithStreamingResponse:
- def __init__(self, openai: OpenAIResource) -> None:
- self._openai = openai
-
- @cached_property
- def keys(self) -> KeysResourceWithStreamingResponse:
- return KeysResourceWithStreamingResponse(self._openai.keys)
-
-
-class AsyncOpenAIResourceWithStreamingResponse:
- def __init__(self, openai: AsyncOpenAIResource) -> None:
- self._openai = openai
-
- @cached_property
- def keys(self) -> AsyncKeysResourceWithStreamingResponse:
- return AsyncKeysResourceWithStreamingResponse(self._openai.keys)
diff --git a/src/gradientai/resources/models/__init__.py b/src/gradientai/resources/models/__init__.py
new file mode 100644
index 00000000..e30dd201
--- /dev/null
+++ b/src/gradientai/resources/models/__init__.py
@@ -0,0 +1,33 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from .models import (
+ ModelsResource,
+ AsyncModelsResource,
+ ModelsResourceWithRawResponse,
+ AsyncModelsResourceWithRawResponse,
+ ModelsResourceWithStreamingResponse,
+ AsyncModelsResourceWithStreamingResponse,
+)
+from .providers import (
+ ProvidersResource,
+ AsyncProvidersResource,
+ ProvidersResourceWithRawResponse,
+ AsyncProvidersResourceWithRawResponse,
+ ProvidersResourceWithStreamingResponse,
+ AsyncProvidersResourceWithStreamingResponse,
+)
+
+__all__ = [
+ "ProvidersResource",
+ "AsyncProvidersResource",
+ "ProvidersResourceWithRawResponse",
+ "AsyncProvidersResourceWithRawResponse",
+ "ProvidersResourceWithStreamingResponse",
+ "AsyncProvidersResourceWithStreamingResponse",
+ "ModelsResource",
+ "AsyncModelsResource",
+ "ModelsResourceWithRawResponse",
+ "AsyncModelsResourceWithRawResponse",
+ "ModelsResourceWithStreamingResponse",
+ "AsyncModelsResourceWithStreamingResponse",
+]
diff --git a/src/gradientai/resources/models.py b/src/gradientai/resources/models/models.py
similarity index 85%
rename from src/gradientai/resources/models.py
rename to src/gradientai/resources/models/models.py
index c8e78b9b..3c524767 100644
--- a/src/gradientai/resources/models.py
+++ b/src/gradientai/resources/models/models.py
@@ -7,24 +7,36 @@
import httpx
-from ..types import model_list_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
-from .._utils import maybe_transform, async_maybe_transform
-from .._compat import cached_property
-from .._resource import SyncAPIResource, AsyncAPIResource
-from .._response import (
+from ...types import model_list_params
+from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._utils import maybe_transform, async_maybe_transform
+from ..._compat import cached_property
+from ..._resource import SyncAPIResource, AsyncAPIResource
+from ..._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from .._base_client import make_request_options
-from ..types.model_list_response import ModelListResponse
+from ..._base_client import make_request_options
+from .providers.providers import (
+ ProvidersResource,
+ AsyncProvidersResource,
+ ProvidersResourceWithRawResponse,
+ AsyncProvidersResourceWithRawResponse,
+ ProvidersResourceWithStreamingResponse,
+ AsyncProvidersResourceWithStreamingResponse,
+)
+from ...types.model_list_response import ModelListResponse
__all__ = ["ModelsResource", "AsyncModelsResource"]
class ModelsResource(SyncAPIResource):
+ @cached_property
+ def providers(self) -> ProvidersResource:
+ return ProvidersResource(self._client)
+
@cached_property
def with_raw_response(self) -> ModelsResourceWithRawResponse:
"""
@@ -122,6 +134,10 @@ def list(
class AsyncModelsResource(AsyncAPIResource):
+ @cached_property
+ def providers(self) -> AsyncProvidersResource:
+ return AsyncProvidersResource(self._client)
+
@cached_property
def with_raw_response(self) -> AsyncModelsResourceWithRawResponse:
"""
@@ -226,6 +242,10 @@ def __init__(self, models: ModelsResource) -> None:
models.list,
)
+ @cached_property
+ def providers(self) -> ProvidersResourceWithRawResponse:
+ return ProvidersResourceWithRawResponse(self._models.providers)
+
class AsyncModelsResourceWithRawResponse:
def __init__(self, models: AsyncModelsResource) -> None:
@@ -235,6 +255,10 @@ def __init__(self, models: AsyncModelsResource) -> None:
models.list,
)
+ @cached_property
+ def providers(self) -> AsyncProvidersResourceWithRawResponse:
+ return AsyncProvidersResourceWithRawResponse(self._models.providers)
+
class ModelsResourceWithStreamingResponse:
def __init__(self, models: ModelsResource) -> None:
@@ -244,6 +268,10 @@ def __init__(self, models: ModelsResource) -> None:
models.list,
)
+ @cached_property
+ def providers(self) -> ProvidersResourceWithStreamingResponse:
+ return ProvidersResourceWithStreamingResponse(self._models.providers)
+
class AsyncModelsResourceWithStreamingResponse:
def __init__(self, models: AsyncModelsResource) -> None:
@@ -252,3 +280,7 @@ def __init__(self, models: AsyncModelsResource) -> None:
self.list = async_to_streamed_response_wrapper(
models.list,
)
+
+ @cached_property
+ def providers(self) -> AsyncProvidersResourceWithStreamingResponse:
+ return AsyncProvidersResourceWithStreamingResponse(self._models.providers)
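The `providers` hooks added to `ModelsResource` reuse the lazy sub-resource pattern seen throughout the client: a `cached_property` that builds the child resource from the shared client on first access and memoizes it. A stand-alone sketch of that pattern, with class bodies reduced to illustration (the real SDK uses its own `cached_property` shim from `_compat`):

```python
from functools import cached_property


class FakeClient:
    """Stand-in for the real client object (illustration only)."""


class ProvidersResource:
    def __init__(self, client: FakeClient) -> None:
        self._client = client


class ModelsResource:
    def __init__(self, client: FakeClient) -> None:
        self._client = client

    @cached_property
    def providers(self) -> ProvidersResource:
        # Constructed on first access, then cached on the instance,
        # mirroring the hook this diff adds to the real ModelsResource.
        return ProvidersResource(self._client)


models = ModelsResource(FakeClient())
assert models.providers is models.providers  # memoized after first access
```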
diff --git a/src/gradientai/resources/model_providers/__init__.py b/src/gradientai/resources/models/providers/__init__.py
similarity index 65%
rename from src/gradientai/resources/model_providers/__init__.py
rename to src/gradientai/resources/models/providers/__init__.py
index 3d91a86c..1731e057 100644
--- a/src/gradientai/resources/model_providers/__init__.py
+++ b/src/gradientai/resources/models/providers/__init__.py
@@ -16,13 +16,13 @@
AnthropicResourceWithStreamingResponse,
AsyncAnthropicResourceWithStreamingResponse,
)
-from .model_providers import (
- ModelProvidersResource,
- AsyncModelProvidersResource,
- ModelProvidersResourceWithRawResponse,
- AsyncModelProvidersResourceWithRawResponse,
- ModelProvidersResourceWithStreamingResponse,
- AsyncModelProvidersResourceWithStreamingResponse,
+from .providers import (
+ ProvidersResource,
+ AsyncProvidersResource,
+ ProvidersResourceWithRawResponse,
+ AsyncProvidersResourceWithRawResponse,
+ ProvidersResourceWithStreamingResponse,
+ AsyncProvidersResourceWithStreamingResponse,
)
__all__ = [
@@ -38,10 +38,10 @@
"AsyncOpenAIResourceWithRawResponse",
"OpenAIResourceWithStreamingResponse",
"AsyncOpenAIResourceWithStreamingResponse",
- "ModelProvidersResource",
- "AsyncModelProvidersResource",
- "ModelProvidersResourceWithRawResponse",
- "AsyncModelProvidersResourceWithRawResponse",
- "ModelProvidersResourceWithStreamingResponse",
- "AsyncModelProvidersResourceWithStreamingResponse",
+ "ProvidersResource",
+ "AsyncProvidersResource",
+ "ProvidersResourceWithRawResponse",
+ "AsyncProvidersResourceWithRawResponse",
+ "ProvidersResourceWithStreamingResponse",
+ "AsyncProvidersResourceWithStreamingResponse",
]
diff --git a/src/gradientai/resources/model_providers/anthropic/keys.py b/src/gradientai/resources/models/providers/anthropic.py
similarity index 84%
rename from src/gradientai/resources/model_providers/anthropic/keys.py
rename to src/gradientai/resources/models/providers/anthropic.py
index 4d884655..26c9b977 100644
--- a/src/gradientai/resources/model_providers/anthropic/keys.py
+++ b/src/gradientai/resources/models/providers/anthropic.py
@@ -15,41 +15,41 @@
async_to_streamed_response_wrapper,
)
from ...._base_client import make_request_options
-from ....types.model_providers.anthropic import (
- key_list_params,
- key_create_params,
- key_update_params,
- key_list_agents_params,
+from ....types.models.providers import (
+ anthropic_list_params,
+ anthropic_create_params,
+ anthropic_update_params,
+ anthropic_list_agents_params,
)
-from ....types.model_providers.anthropic.key_list_response import KeyListResponse
-from ....types.model_providers.anthropic.key_create_response import KeyCreateResponse
-from ....types.model_providers.anthropic.key_delete_response import KeyDeleteResponse
-from ....types.model_providers.anthropic.key_update_response import KeyUpdateResponse
-from ....types.model_providers.anthropic.key_retrieve_response import KeyRetrieveResponse
-from ....types.model_providers.anthropic.key_list_agents_response import KeyListAgentsResponse
+from ....types.models.providers.anthropic_list_response import AnthropicListResponse
+from ....types.models.providers.anthropic_create_response import AnthropicCreateResponse
+from ....types.models.providers.anthropic_delete_response import AnthropicDeleteResponse
+from ....types.models.providers.anthropic_update_response import AnthropicUpdateResponse
+from ....types.models.providers.anthropic_retrieve_response import AnthropicRetrieveResponse
+from ....types.models.providers.anthropic_list_agents_response import AnthropicListAgentsResponse
-__all__ = ["KeysResource", "AsyncKeysResource"]
+__all__ = ["AnthropicResource", "AsyncAnthropicResource"]
-class KeysResource(SyncAPIResource):
+class AnthropicResource(SyncAPIResource):
@cached_property
- def with_raw_response(self) -> KeysResourceWithRawResponse:
+ def with_raw_response(self) -> AnthropicResourceWithRawResponse:
"""
This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers
"""
- return KeysResourceWithRawResponse(self)
+ return AnthropicResourceWithRawResponse(self)
@cached_property
- def with_streaming_response(self) -> KeysResourceWithStreamingResponse:
+ def with_streaming_response(self) -> AnthropicResourceWithStreamingResponse:
"""
An alternative to `.with_raw_response` that doesn't eagerly read the response body.
For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response
"""
- return KeysResourceWithStreamingResponse(self)
+ return AnthropicResourceWithStreamingResponse(self)
def create(
self,
@@ -62,7 +62,7 @@ def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyCreateResponse:
+ ) -> AnthropicCreateResponse:
"""
To create an Anthropic API key, send a POST request to
`/v2/gen-ai/anthropic/keys`.
@@ -85,12 +85,12 @@ def create(
"api_key": api_key,
"name": name,
},
- key_create_params.KeyCreateParams,
+ anthropic_create_params.AnthropicCreateParams,
),
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=KeyCreateResponse,
+ cast_to=AnthropicCreateResponse,
)
def retrieve(
@@ -103,7 +103,7 @@ def retrieve(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyRetrieveResponse:
+ ) -> AnthropicRetrieveResponse:
"""
To retrieve details of an Anthropic API key, send a GET request to
`/v2/gen-ai/anthropic/keys/{api_key_uuid}`.
@@ -126,7 +126,7 @@ def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=KeyRetrieveResponse,
+ cast_to=AnthropicRetrieveResponse,
)
def update(
@@ -142,7 +142,7 @@ def update(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyUpdateResponse:
+ ) -> AnthropicUpdateResponse:
"""
To update an Anthropic API key, send a PUT request to
`/v2/gen-ai/anthropic/keys/{api_key_uuid}`.
@@ -168,12 +168,12 @@ def update(
"body_api_key_uuid": body_api_key_uuid,
"name": name,
},
- key_update_params.KeyUpdateParams,
+ anthropic_update_params.AnthropicUpdateParams,
),
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=KeyUpdateResponse,
+ cast_to=AnthropicUpdateResponse,
)
def list(
@@ -187,7 +187,7 @@ def list(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyListResponse:
+ ) -> AnthropicListResponse:
"""
To list all Anthropic API keys, send a GET request to
`/v2/gen-ai/anthropic/keys`.
@@ -219,10 +219,10 @@ def list(
"page": page,
"per_page": per_page,
},
- key_list_params.KeyListParams,
+ anthropic_list_params.AnthropicListParams,
),
),
- cast_to=KeyListResponse,
+ cast_to=AnthropicListResponse,
)
def delete(
@@ -235,7 +235,7 @@ def delete(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyDeleteResponse:
+ ) -> AnthropicDeleteResponse:
"""
To delete an Anthropic API key, send a DELETE request to
`/v2/gen-ai/anthropic/keys/{api_key_uuid}`.
@@ -258,7 +258,7 @@ def delete(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=KeyDeleteResponse,
+ cast_to=AnthropicDeleteResponse,
)
def list_agents(
@@ -273,7 +273,7 @@ def list_agents(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyListAgentsResponse:
+ ) -> AnthropicListAgentsResponse:
"""
List Agents by Anthropic Key.
@@ -306,32 +306,32 @@ def list_agents(
"page": page,
"per_page": per_page,
},
- key_list_agents_params.KeyListAgentsParams,
+ anthropic_list_agents_params.AnthropicListAgentsParams,
),
),
- cast_to=KeyListAgentsResponse,
+ cast_to=AnthropicListAgentsResponse,
)
-class AsyncKeysResource(AsyncAPIResource):
+class AsyncAnthropicResource(AsyncAPIResource):
@cached_property
- def with_raw_response(self) -> AsyncKeysResourceWithRawResponse:
+ def with_raw_response(self) -> AsyncAnthropicResourceWithRawResponse:
"""
This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers
"""
- return AsyncKeysResourceWithRawResponse(self)
+ return AsyncAnthropicResourceWithRawResponse(self)
@cached_property
- def with_streaming_response(self) -> AsyncKeysResourceWithStreamingResponse:
+ def with_streaming_response(self) -> AsyncAnthropicResourceWithStreamingResponse:
"""
An alternative to `.with_raw_response` that doesn't eagerly read the response body.
For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response
"""
- return AsyncKeysResourceWithStreamingResponse(self)
+ return AsyncAnthropicResourceWithStreamingResponse(self)
async def create(
self,
@@ -344,7 +344,7 @@ async def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyCreateResponse:
+ ) -> AnthropicCreateResponse:
"""
To create an Anthropic API key, send a POST request to
`/v2/gen-ai/anthropic/keys`.
@@ -367,12 +367,12 @@ async def create(
"api_key": api_key,
"name": name,
},
- key_create_params.KeyCreateParams,
+ anthropic_create_params.AnthropicCreateParams,
),
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=KeyCreateResponse,
+ cast_to=AnthropicCreateResponse,
)
async def retrieve(
@@ -385,7 +385,7 @@ async def retrieve(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyRetrieveResponse:
+ ) -> AnthropicRetrieveResponse:
"""
To retrieve details of an Anthropic API key, send a GET request to
`/v2/gen-ai/anthropic/keys/{api_key_uuid}`.
@@ -408,7 +408,7 @@ async def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=KeyRetrieveResponse,
+ cast_to=AnthropicRetrieveResponse,
)
async def update(
@@ -424,7 +424,7 @@ async def update(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyUpdateResponse:
+ ) -> AnthropicUpdateResponse:
"""
To update an Anthropic API key, send a PUT request to
`/v2/gen-ai/anthropic/keys/{api_key_uuid}`.
@@ -450,12 +450,12 @@ async def update(
"body_api_key_uuid": body_api_key_uuid,
"name": name,
},
- key_update_params.KeyUpdateParams,
+ anthropic_update_params.AnthropicUpdateParams,
),
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=KeyUpdateResponse,
+ cast_to=AnthropicUpdateResponse,
)
async def list(
@@ -469,7 +469,7 @@ async def list(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyListResponse:
+ ) -> AnthropicListResponse:
"""
To list all Anthropic API keys, send a GET request to
`/v2/gen-ai/anthropic/keys`.
@@ -501,10 +501,10 @@ async def list(
"page": page,
"per_page": per_page,
},
- key_list_params.KeyListParams,
+ anthropic_list_params.AnthropicListParams,
),
),
- cast_to=KeyListResponse,
+ cast_to=AnthropicListResponse,
)
async def delete(
@@ -517,7 +517,7 @@ async def delete(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyDeleteResponse:
+ ) -> AnthropicDeleteResponse:
"""
To delete an Anthropic API key, send a DELETE request to
`/v2/gen-ai/anthropic/keys/{api_key_uuid}`.
@@ -540,7 +540,7 @@ async def delete(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=KeyDeleteResponse,
+ cast_to=AnthropicDeleteResponse,
)
async def list_agents(
@@ -555,7 +555,7 @@ async def list_agents(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyListAgentsResponse:
+ ) -> AnthropicListAgentsResponse:
"""
List Agents by Anthropic Key.
@@ -588,104 +588,104 @@ async def list_agents(
"page": page,
"per_page": per_page,
},
- key_list_agents_params.KeyListAgentsParams,
+ anthropic_list_agents_params.AnthropicListAgentsParams,
),
),
- cast_to=KeyListAgentsResponse,
+ cast_to=AnthropicListAgentsResponse,
)
-class KeysResourceWithRawResponse:
- def __init__(self, keys: KeysResource) -> None:
- self._keys = keys
+class AnthropicResourceWithRawResponse:
+ def __init__(self, anthropic: AnthropicResource) -> None:
+ self._anthropic = anthropic
self.create = to_raw_response_wrapper(
- keys.create,
+ anthropic.create,
)
self.retrieve = to_raw_response_wrapper(
- keys.retrieve,
+ anthropic.retrieve,
)
self.update = to_raw_response_wrapper(
- keys.update,
+ anthropic.update,
)
self.list = to_raw_response_wrapper(
- keys.list,
+ anthropic.list,
)
self.delete = to_raw_response_wrapper(
- keys.delete,
+ anthropic.delete,
)
self.list_agents = to_raw_response_wrapper(
- keys.list_agents,
+ anthropic.list_agents,
)
-class AsyncKeysResourceWithRawResponse:
- def __init__(self, keys: AsyncKeysResource) -> None:
- self._keys = keys
+class AsyncAnthropicResourceWithRawResponse:
+ def __init__(self, anthropic: AsyncAnthropicResource) -> None:
+ self._anthropic = anthropic
self.create = async_to_raw_response_wrapper(
- keys.create,
+ anthropic.create,
)
self.retrieve = async_to_raw_response_wrapper(
- keys.retrieve,
+ anthropic.retrieve,
)
self.update = async_to_raw_response_wrapper(
- keys.update,
+ anthropic.update,
)
self.list = async_to_raw_response_wrapper(
- keys.list,
+ anthropic.list,
)
self.delete = async_to_raw_response_wrapper(
- keys.delete,
+ anthropic.delete,
)
self.list_agents = async_to_raw_response_wrapper(
- keys.list_agents,
+ anthropic.list_agents,
)
-class KeysResourceWithStreamingResponse:
- def __init__(self, keys: KeysResource) -> None:
- self._keys = keys
+class AnthropicResourceWithStreamingResponse:
+ def __init__(self, anthropic: AnthropicResource) -> None:
+ self._anthropic = anthropic
self.create = to_streamed_response_wrapper(
- keys.create,
+ anthropic.create,
)
self.retrieve = to_streamed_response_wrapper(
- keys.retrieve,
+ anthropic.retrieve,
)
self.update = to_streamed_response_wrapper(
- keys.update,
+ anthropic.update,
)
self.list = to_streamed_response_wrapper(
- keys.list,
+ anthropic.list,
)
self.delete = to_streamed_response_wrapper(
- keys.delete,
+ anthropic.delete,
)
self.list_agents = to_streamed_response_wrapper(
- keys.list_agents,
+ anthropic.list_agents,
)
-class AsyncKeysResourceWithStreamingResponse:
- def __init__(self, keys: AsyncKeysResource) -> None:
- self._keys = keys
+class AsyncAnthropicResourceWithStreamingResponse:
+ def __init__(self, anthropic: AsyncAnthropicResource) -> None:
+ self._anthropic = anthropic
self.create = async_to_streamed_response_wrapper(
- keys.create,
+ anthropic.create,
)
self.retrieve = async_to_streamed_response_wrapper(
- keys.retrieve,
+ anthropic.retrieve,
)
self.update = async_to_streamed_response_wrapper(
- keys.update,
+ anthropic.update,
)
self.list = async_to_streamed_response_wrapper(
- keys.list,
+ anthropic.list,
)
self.delete = async_to_streamed_response_wrapper(
- keys.delete,
+ anthropic.delete,
)
self.list_agents = async_to_streamed_response_wrapper(
- keys.list_agents,
+ anthropic.list_agents,
)
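With the rename applied, the Anthropic key operations live directly on the provider resource rather than on a nested `keys` resource. A hedged sketch against the endpoints the docstrings name (`/v2/gen-ai/anthropic/keys`); the UUID and key value are placeholders:

```python
from gradientai import GradientAI

client = GradientAI()  # construction details assumed

# POST /v2/gen-ai/anthropic/keys
created = client.models.providers.anthropic.create(
    api_key="sk-ant-placeholder",  # not a real key
    name="staging-key",
)

# GET /v2/gen-ai/anthropic/keys (paginated)
page = client.models.providers.anthropic.list(page=1, per_page=10)

# List agents attached to a key; `uuid` is a placeholder path parameter.
agents = client.models.providers.anthropic.list_agents(
    uuid="00000000-0000-0000-0000-000000000000",
    page=1,
    per_page=10,
)
```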
diff --git a/src/gradientai/resources/model_providers/openai/keys.py b/src/gradientai/resources/models/providers/openai.py
similarity index 85%
rename from src/gradientai/resources/model_providers/openai/keys.py
rename to src/gradientai/resources/models/providers/openai.py
index fb974808..d337cd9b 100644
--- a/src/gradientai/resources/model_providers/openai/keys.py
+++ b/src/gradientai/resources/models/providers/openai.py
@@ -15,41 +15,41 @@
async_to_streamed_response_wrapper,
)
from ...._base_client import make_request_options
-from ....types.model_providers.openai import (
- key_list_params,
- key_create_params,
- key_update_params,
- key_retrieve_agents_params,
+from ....types.models.providers import (
+ openai_list_params,
+ openai_create_params,
+ openai_update_params,
+ openai_retrieve_agents_params,
)
-from ....types.model_providers.openai.key_list_response import KeyListResponse
-from ....types.model_providers.openai.key_create_response import KeyCreateResponse
-from ....types.model_providers.openai.key_delete_response import KeyDeleteResponse
-from ....types.model_providers.openai.key_update_response import KeyUpdateResponse
-from ....types.model_providers.openai.key_retrieve_response import KeyRetrieveResponse
-from ....types.model_providers.openai.key_retrieve_agents_response import KeyRetrieveAgentsResponse
+from ....types.models.providers.openai_list_response import OpenAIListResponse
+from ....types.models.providers.openai_create_response import OpenAICreateResponse
+from ....types.models.providers.openai_delete_response import OpenAIDeleteResponse
+from ....types.models.providers.openai_update_response import OpenAIUpdateResponse
+from ....types.models.providers.openai_retrieve_response import OpenAIRetrieveResponse
+from ....types.models.providers.openai_retrieve_agents_response import OpenAIRetrieveAgentsResponse
-__all__ = ["KeysResource", "AsyncKeysResource"]
+__all__ = ["OpenAIResource", "AsyncOpenAIResource"]
-class KeysResource(SyncAPIResource):
+class OpenAIResource(SyncAPIResource):
@cached_property
- def with_raw_response(self) -> KeysResourceWithRawResponse:
+ def with_raw_response(self) -> OpenAIResourceWithRawResponse:
"""
This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers
"""
- return KeysResourceWithRawResponse(self)
+ return OpenAIResourceWithRawResponse(self)
@cached_property
- def with_streaming_response(self) -> KeysResourceWithStreamingResponse:
+ def with_streaming_response(self) -> OpenAIResourceWithStreamingResponse:
"""
An alternative to `.with_raw_response` that doesn't eagerly read the response body.
For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response
"""
- return KeysResourceWithStreamingResponse(self)
+ return OpenAIResourceWithStreamingResponse(self)
def create(
self,
@@ -62,7 +62,7 @@ def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyCreateResponse:
+ ) -> OpenAICreateResponse:
"""
To create an OpenAI API key, send a POST request to `/v2/gen-ai/openai/keys`.
@@ -84,12 +84,12 @@ def create(
"api_key": api_key,
"name": name,
},
- key_create_params.KeyCreateParams,
+ openai_create_params.OpenAICreateParams,
),
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=KeyCreateResponse,
+ cast_to=OpenAICreateResponse,
)
def retrieve(
@@ -102,7 +102,7 @@ def retrieve(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyRetrieveResponse:
+ ) -> OpenAIRetrieveResponse:
"""
To retrieve details of an OpenAI API key, send a GET request to
`/v2/gen-ai/openai/keys/{api_key_uuid}`.
@@ -125,7 +125,7 @@ def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=KeyRetrieveResponse,
+ cast_to=OpenAIRetrieveResponse,
)
def update(
@@ -141,7 +141,7 @@ def update(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyUpdateResponse:
+ ) -> OpenAIUpdateResponse:
"""
To update an OpenAI API key, send a PUT request to
`/v2/gen-ai/openai/keys/{api_key_uuid}`.
@@ -167,12 +167,12 @@ def update(
"body_api_key_uuid": body_api_key_uuid,
"name": name,
},
- key_update_params.KeyUpdateParams,
+ openai_update_params.OpenAIUpdateParams,
),
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=KeyUpdateResponse,
+ cast_to=OpenAIUpdateResponse,
)
def list(
@@ -186,7 +186,7 @@ def list(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyListResponse:
+ ) -> OpenAIListResponse:
"""
To list all OpenAI API keys, send a GET request to `/v2/gen-ai/openai/keys`.
@@ -217,10 +217,10 @@ def list(
"page": page,
"per_page": per_page,
},
- key_list_params.KeyListParams,
+ openai_list_params.OpenAIListParams,
),
),
- cast_to=KeyListResponse,
+ cast_to=OpenAIListResponse,
)
def delete(
@@ -233,7 +233,7 @@ def delete(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyDeleteResponse:
+ ) -> OpenAIDeleteResponse:
"""
To delete an OpenAI API key, send a DELETE request to
`/v2/gen-ai/openai/keys/{api_key_uuid}`.
@@ -256,7 +256,7 @@ def delete(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=KeyDeleteResponse,
+ cast_to=OpenAIDeleteResponse,
)
def retrieve_agents(
@@ -271,7 +271,7 @@ def retrieve_agents(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyRetrieveAgentsResponse:
+ ) -> OpenAIRetrieveAgentsResponse:
"""
List Agents by OpenAI Key.
@@ -304,32 +304,32 @@ def retrieve_agents(
"page": page,
"per_page": per_page,
},
- key_retrieve_agents_params.KeyRetrieveAgentsParams,
+ openai_retrieve_agents_params.OpenAIRetrieveAgentsParams,
),
),
- cast_to=KeyRetrieveAgentsResponse,
+ cast_to=OpenAIRetrieveAgentsResponse,
)
-class AsyncKeysResource(AsyncAPIResource):
+class AsyncOpenAIResource(AsyncAPIResource):
@cached_property
- def with_raw_response(self) -> AsyncKeysResourceWithRawResponse:
+ def with_raw_response(self) -> AsyncOpenAIResourceWithRawResponse:
"""
This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers
"""
- return AsyncKeysResourceWithRawResponse(self)
+ return AsyncOpenAIResourceWithRawResponse(self)
@cached_property
- def with_streaming_response(self) -> AsyncKeysResourceWithStreamingResponse:
+ def with_streaming_response(self) -> AsyncOpenAIResourceWithStreamingResponse:
"""
An alternative to `.with_raw_response` that doesn't eagerly read the response body.
For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response
"""
- return AsyncKeysResourceWithStreamingResponse(self)
+ return AsyncOpenAIResourceWithStreamingResponse(self)
async def create(
self,
@@ -342,7 +342,7 @@ async def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyCreateResponse:
+ ) -> OpenAICreateResponse:
"""
To create an OpenAI API key, send a POST request to `/v2/gen-ai/openai/keys`.
@@ -364,12 +364,12 @@ async def create(
"api_key": api_key,
"name": name,
},
- key_create_params.KeyCreateParams,
+ openai_create_params.OpenAICreateParams,
),
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=KeyCreateResponse,
+ cast_to=OpenAICreateResponse,
)
async def retrieve(
@@ -382,7 +382,7 @@ async def retrieve(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyRetrieveResponse:
+ ) -> OpenAIRetrieveResponse:
"""
To retrieve details of an OpenAI API key, send a GET request to
`/v2/gen-ai/openai/keys/{api_key_uuid}`.
@@ -405,7 +405,7 @@ async def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=KeyRetrieveResponse,
+ cast_to=OpenAIRetrieveResponse,
)
async def update(
@@ -421,7 +421,7 @@ async def update(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyUpdateResponse:
+ ) -> OpenAIUpdateResponse:
"""
To update an OpenAI API key, send a PUT request to
`/v2/gen-ai/openai/keys/{api_key_uuid}`.
@@ -447,12 +447,12 @@ async def update(
"body_api_key_uuid": body_api_key_uuid,
"name": name,
},
- key_update_params.KeyUpdateParams,
+ openai_update_params.OpenAIUpdateParams,
),
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=KeyUpdateResponse,
+ cast_to=OpenAIUpdateResponse,
)
async def list(
@@ -466,7 +466,7 @@ async def list(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyListResponse:
+ ) -> OpenAIListResponse:
"""
To list all OpenAI API keys, send a GET request to `/v2/gen-ai/openai/keys`.
@@ -497,10 +497,10 @@ async def list(
"page": page,
"per_page": per_page,
},
- key_list_params.KeyListParams,
+ openai_list_params.OpenAIListParams,
),
),
- cast_to=KeyListResponse,
+ cast_to=OpenAIListResponse,
)
async def delete(
@@ -513,7 +513,7 @@ async def delete(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyDeleteResponse:
+ ) -> OpenAIDeleteResponse:
"""
To delete an OpenAI API key, send a DELETE request to
`/v2/gen-ai/openai/keys/{api_key_uuid}`.
@@ -536,7 +536,7 @@ async def delete(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=KeyDeleteResponse,
+ cast_to=OpenAIDeleteResponse,
)
async def retrieve_agents(
@@ -551,7 +551,7 @@ async def retrieve_agents(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> KeyRetrieveAgentsResponse:
+ ) -> OpenAIRetrieveAgentsResponse:
"""
List Agents by OpenAI Key.
@@ -584,104 +584,104 @@ async def retrieve_agents(
"page": page,
"per_page": per_page,
},
- key_retrieve_agents_params.KeyRetrieveAgentsParams,
+ openai_retrieve_agents_params.OpenAIRetrieveAgentsParams,
),
),
- cast_to=KeyRetrieveAgentsResponse,
+ cast_to=OpenAIRetrieveAgentsResponse,
)
-class KeysResourceWithRawResponse:
- def __init__(self, keys: KeysResource) -> None:
- self._keys = keys
+class OpenAIResourceWithRawResponse:
+ def __init__(self, openai: OpenAIResource) -> None:
+ self._openai = openai
self.create = to_raw_response_wrapper(
- keys.create,
+ openai.create,
)
self.retrieve = to_raw_response_wrapper(
- keys.retrieve,
+ openai.retrieve,
)
self.update = to_raw_response_wrapper(
- keys.update,
+ openai.update,
)
self.list = to_raw_response_wrapper(
- keys.list,
+ openai.list,
)
self.delete = to_raw_response_wrapper(
- keys.delete,
+ openai.delete,
)
self.retrieve_agents = to_raw_response_wrapper(
- keys.retrieve_agents,
+ openai.retrieve_agents,
)
-class AsyncKeysResourceWithRawResponse:
- def __init__(self, keys: AsyncKeysResource) -> None:
- self._keys = keys
+class AsyncOpenAIResourceWithRawResponse:
+ def __init__(self, openai: AsyncOpenAIResource) -> None:
+ self._openai = openai
self.create = async_to_raw_response_wrapper(
- keys.create,
+ openai.create,
)
self.retrieve = async_to_raw_response_wrapper(
- keys.retrieve,
+ openai.retrieve,
)
self.update = async_to_raw_response_wrapper(
- keys.update,
+ openai.update,
)
self.list = async_to_raw_response_wrapper(
- keys.list,
+ openai.list,
)
self.delete = async_to_raw_response_wrapper(
- keys.delete,
+ openai.delete,
)
self.retrieve_agents = async_to_raw_response_wrapper(
- keys.retrieve_agents,
+ openai.retrieve_agents,
)
-class KeysResourceWithStreamingResponse:
- def __init__(self, keys: KeysResource) -> None:
- self._keys = keys
+class OpenAIResourceWithStreamingResponse:
+ def __init__(self, openai: OpenAIResource) -> None:
+ self._openai = openai
self.create = to_streamed_response_wrapper(
- keys.create,
+ openai.create,
)
self.retrieve = to_streamed_response_wrapper(
- keys.retrieve,
+ openai.retrieve,
)
self.update = to_streamed_response_wrapper(
- keys.update,
+ openai.update,
)
self.list = to_streamed_response_wrapper(
- keys.list,
+ openai.list,
)
self.delete = to_streamed_response_wrapper(
- keys.delete,
+ openai.delete,
)
self.retrieve_agents = to_streamed_response_wrapper(
- keys.retrieve_agents,
+ openai.retrieve_agents,
)
-class AsyncKeysResourceWithStreamingResponse:
- def __init__(self, keys: AsyncKeysResource) -> None:
- self._keys = keys
+class AsyncOpenAIResourceWithStreamingResponse:
+ def __init__(self, openai: AsyncOpenAIResource) -> None:
+ self._openai = openai
self.create = async_to_streamed_response_wrapper(
- keys.create,
+ openai.create,
)
self.retrieve = async_to_streamed_response_wrapper(
- keys.retrieve,
+ openai.retrieve,
)
self.update = async_to_streamed_response_wrapper(
- keys.update,
+ openai.update,
)
self.list = async_to_streamed_response_wrapper(
- keys.list,
+ openai.list,
)
self.delete = async_to_streamed_response_wrapper(
- keys.delete,
+ openai.delete,
)
self.retrieve_agents = async_to_streamed_response_wrapper(
- keys.retrieve_agents,
+ openai.retrieve_agents,
)
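The raw-response wrappers this file re-points from `keys.*` to `openai.*` behave the same after the rename: prefixing a call with `.with_raw_response` yields the HTTP response object instead of the parsed model. A sketch, assuming the standard Stainless raw-response interface (`.headers` and `.parse()`):

```python
from gradientai import GradientAI

client = GradientAI()  # construction details assumed

# Returns the raw response rather than a parsed OpenAIListResponse.
response = client.models.providers.openai.with_raw_response.list(
    page=1,
    per_page=10,
)
print(response.headers)  # raw HTTP headers
keys = response.parse()  # parsed OpenAIListResponse
```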
diff --git a/src/gradientai/resources/model_providers/model_providers.py b/src/gradientai/resources/models/providers/providers.py
similarity index 58%
rename from src/gradientai/resources/model_providers/model_providers.py
rename to src/gradientai/resources/models/providers/providers.py
index cf710ecf..3e3f4dde 100644
--- a/src/gradientai/resources/model_providers/model_providers.py
+++ b/src/gradientai/resources/models/providers/providers.py
@@ -2,9 +2,7 @@
from __future__ import annotations
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from .openai.openai import (
+from .openai import (
OpenAIResource,
AsyncOpenAIResource,
OpenAIResourceWithRawResponse,
@@ -12,7 +10,7 @@
OpenAIResourceWithStreamingResponse,
AsyncOpenAIResourceWithStreamingResponse,
)
-from .anthropic.anthropic import (
+from .anthropic import (
AnthropicResource,
AsyncAnthropicResource,
AnthropicResourceWithRawResponse,
@@ -20,11 +18,13 @@
AnthropicResourceWithStreamingResponse,
AsyncAnthropicResourceWithStreamingResponse,
)
+from ...._compat import cached_property
+from ...._resource import SyncAPIResource, AsyncAPIResource
-__all__ = ["ModelProvidersResource", "AsyncModelProvidersResource"]
+__all__ = ["ProvidersResource", "AsyncProvidersResource"]
-class ModelProvidersResource(SyncAPIResource):
+class ProvidersResource(SyncAPIResource):
@cached_property
def anthropic(self) -> AnthropicResource:
return AnthropicResource(self._client)
@@ -34,26 +34,26 @@ def openai(self) -> OpenAIResource:
return OpenAIResource(self._client)
@cached_property
- def with_raw_response(self) -> ModelProvidersResourceWithRawResponse:
+ def with_raw_response(self) -> ProvidersResourceWithRawResponse:
"""
This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers
"""
- return ModelProvidersResourceWithRawResponse(self)
+ return ProvidersResourceWithRawResponse(self)
@cached_property
- def with_streaming_response(self) -> ModelProvidersResourceWithStreamingResponse:
+ def with_streaming_response(self) -> ProvidersResourceWithStreamingResponse:
"""
An alternative to `.with_raw_response` that doesn't eagerly read the response body.
For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response
"""
- return ModelProvidersResourceWithStreamingResponse(self)
+ return ProvidersResourceWithStreamingResponse(self)
-class AsyncModelProvidersResource(AsyncAPIResource):
+class AsyncProvidersResource(AsyncAPIResource):
@cached_property
def anthropic(self) -> AsyncAnthropicResource:
return AsyncAnthropicResource(self._client)
@@ -63,72 +63,72 @@ def openai(self) -> AsyncOpenAIResource:
return AsyncOpenAIResource(self._client)
@cached_property
- def with_raw_response(self) -> AsyncModelProvidersResourceWithRawResponse:
+ def with_raw_response(self) -> AsyncProvidersResourceWithRawResponse:
"""
This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers
"""
- return AsyncModelProvidersResourceWithRawResponse(self)
+ return AsyncProvidersResourceWithRawResponse(self)
@cached_property
- def with_streaming_response(self) -> AsyncModelProvidersResourceWithStreamingResponse:
+ def with_streaming_response(self) -> AsyncProvidersResourceWithStreamingResponse:
"""
An alternative to `.with_raw_response` that doesn't eagerly read the response body.
For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response
"""
- return AsyncModelProvidersResourceWithStreamingResponse(self)
+ return AsyncProvidersResourceWithStreamingResponse(self)
-class ModelProvidersResourceWithRawResponse:
- def __init__(self, model_providers: ModelProvidersResource) -> None:
- self._model_providers = model_providers
+class ProvidersResourceWithRawResponse:
+ def __init__(self, providers: ProvidersResource) -> None:
+ self._providers = providers
@cached_property
def anthropic(self) -> AnthropicResourceWithRawResponse:
- return AnthropicResourceWithRawResponse(self._model_providers.anthropic)
+ return AnthropicResourceWithRawResponse(self._providers.anthropic)
@cached_property
def openai(self) -> OpenAIResourceWithRawResponse:
- return OpenAIResourceWithRawResponse(self._model_providers.openai)
+ return OpenAIResourceWithRawResponse(self._providers.openai)
-class AsyncModelProvidersResourceWithRawResponse:
- def __init__(self, model_providers: AsyncModelProvidersResource) -> None:
- self._model_providers = model_providers
+class AsyncProvidersResourceWithRawResponse:
+ def __init__(self, providers: AsyncProvidersResource) -> None:
+ self._providers = providers
@cached_property
def anthropic(self) -> AsyncAnthropicResourceWithRawResponse:
- return AsyncAnthropicResourceWithRawResponse(self._model_providers.anthropic)
+ return AsyncAnthropicResourceWithRawResponse(self._providers.anthropic)
@cached_property
def openai(self) -> AsyncOpenAIResourceWithRawResponse:
- return AsyncOpenAIResourceWithRawResponse(self._model_providers.openai)
+ return AsyncOpenAIResourceWithRawResponse(self._providers.openai)
-class ModelProvidersResourceWithStreamingResponse:
- def __init__(self, model_providers: ModelProvidersResource) -> None:
- self._model_providers = model_providers
+class ProvidersResourceWithStreamingResponse:
+ def __init__(self, providers: ProvidersResource) -> None:
+ self._providers = providers
@cached_property
def anthropic(self) -> AnthropicResourceWithStreamingResponse:
- return AnthropicResourceWithStreamingResponse(self._model_providers.anthropic)
+ return AnthropicResourceWithStreamingResponse(self._providers.anthropic)
@cached_property
def openai(self) -> OpenAIResourceWithStreamingResponse:
- return OpenAIResourceWithStreamingResponse(self._model_providers.openai)
+ return OpenAIResourceWithStreamingResponse(self._providers.openai)
-class AsyncModelProvidersResourceWithStreamingResponse:
- def __init__(self, model_providers: AsyncModelProvidersResource) -> None:
- self._model_providers = model_providers
+class AsyncProvidersResourceWithStreamingResponse:
+ def __init__(self, providers: AsyncProvidersResource) -> None:
+ self._providers = providers
@cached_property
def anthropic(self) -> AsyncAnthropicResourceWithStreamingResponse:
- return AsyncAnthropicResourceWithStreamingResponse(self._model_providers.anthropic)
+ return AsyncAnthropicResourceWithStreamingResponse(self._providers.anthropic)
@cached_property
def openai(self) -> AsyncOpenAIResourceWithStreamingResponse:
- return AsyncOpenAIResourceWithStreamingResponse(self._model_providers.openai)
+ return AsyncOpenAIResourceWithStreamingResponse(self._providers.openai)
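
After this rename the provider key endpoints hang directly off `client.models.providers` instead of `client.model_providers.<provider>.keys`. A minimal sketch of the new call shape (paths taken from the resource and test changes in this diff; argument values are placeholders):

```python
# Old: client.model_providers.anthropic.keys.list(...)
# New: client.models.providers.anthropic.list(...)
from gradientai import GradientAI

client = GradientAI()

anthropic_keys = client.models.providers.anthropic.list(page=1, per_page=10)
openai_key = client.models.providers.openai.create(api_key="api_key", name="name")
```
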
diff --git a/src/gradientai/types/model_providers/anthropic/__init__.py b/src/gradientai/types/model_providers/anthropic/__init__.py
deleted file mode 100644
index eb47e709..00000000
--- a/src/gradientai/types/model_providers/anthropic/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from .key_list_params import KeyListParams as KeyListParams
-from .key_create_params import KeyCreateParams as KeyCreateParams
-from .key_list_response import KeyListResponse as KeyListResponse
-from .key_update_params import KeyUpdateParams as KeyUpdateParams
-from .key_create_response import KeyCreateResponse as KeyCreateResponse
-from .key_delete_response import KeyDeleteResponse as KeyDeleteResponse
-from .key_update_response import KeyUpdateResponse as KeyUpdateResponse
-from .key_retrieve_response import KeyRetrieveResponse as KeyRetrieveResponse
-from .key_list_agents_params import KeyListAgentsParams as KeyListAgentsParams
-from .key_list_agents_response import KeyListAgentsResponse as KeyListAgentsResponse
diff --git a/src/gradientai/types/model_providers/openai/__init__.py b/src/gradientai/types/model_providers/openai/__init__.py
deleted file mode 100644
index 70abf332..00000000
--- a/src/gradientai/types/model_providers/openai/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from .key_list_params import KeyListParams as KeyListParams
-from .key_create_params import KeyCreateParams as KeyCreateParams
-from .key_list_response import KeyListResponse as KeyListResponse
-from .key_update_params import KeyUpdateParams as KeyUpdateParams
-from .key_create_response import KeyCreateResponse as KeyCreateResponse
-from .key_delete_response import KeyDeleteResponse as KeyDeleteResponse
-from .key_update_response import KeyUpdateResponse as KeyUpdateResponse
-from .key_retrieve_response import KeyRetrieveResponse as KeyRetrieveResponse
-from .key_retrieve_agents_params import KeyRetrieveAgentsParams as KeyRetrieveAgentsParams
-from .key_retrieve_agents_response import KeyRetrieveAgentsResponse as KeyRetrieveAgentsResponse
diff --git a/src/gradientai/types/model_providers/__init__.py b/src/gradientai/types/models/__init__.py
similarity index 100%
rename from src/gradientai/types/model_providers/__init__.py
rename to src/gradientai/types/models/__init__.py
diff --git a/src/gradientai/types/models/providers/__init__.py b/src/gradientai/types/models/providers/__init__.py
new file mode 100644
index 00000000..74366e70
--- /dev/null
+++ b/src/gradientai/types/models/providers/__init__.py
@@ -0,0 +1,24 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from .openai_list_params import OpenAIListParams as OpenAIListParams
+from .openai_create_params import OpenAICreateParams as OpenAICreateParams
+from .openai_list_response import OpenAIListResponse as OpenAIListResponse
+from .openai_update_params import OpenAIUpdateParams as OpenAIUpdateParams
+from .anthropic_list_params import AnthropicListParams as AnthropicListParams
+from .openai_create_response import OpenAICreateResponse as OpenAICreateResponse
+from .openai_delete_response import OpenAIDeleteResponse as OpenAIDeleteResponse
+from .openai_update_response import OpenAIUpdateResponse as OpenAIUpdateResponse
+from .anthropic_create_params import AnthropicCreateParams as AnthropicCreateParams
+from .anthropic_list_response import AnthropicListResponse as AnthropicListResponse
+from .anthropic_update_params import AnthropicUpdateParams as AnthropicUpdateParams
+from .openai_retrieve_response import OpenAIRetrieveResponse as OpenAIRetrieveResponse
+from .anthropic_create_response import AnthropicCreateResponse as AnthropicCreateResponse
+from .anthropic_delete_response import AnthropicDeleteResponse as AnthropicDeleteResponse
+from .anthropic_update_response import AnthropicUpdateResponse as AnthropicUpdateResponse
+from .anthropic_retrieve_response import AnthropicRetrieveResponse as AnthropicRetrieveResponse
+from .anthropic_list_agents_params import AnthropicListAgentsParams as AnthropicListAgentsParams
+from .openai_retrieve_agents_params import OpenAIRetrieveAgentsParams as OpenAIRetrieveAgentsParams
+from .anthropic_list_agents_response import AnthropicListAgentsResponse as AnthropicListAgentsResponse
+from .openai_retrieve_agents_response import OpenAIRetrieveAgentsResponse as OpenAIRetrieveAgentsResponse
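
Imports against the new package mirror the re-exports above; the provider-prefixed names replace the deleted per-provider `Key*` modules:

```python
# Replacements for gradientai.types.model_providers.{anthropic,openai}.Key* types.
from gradientai.types.models.providers import (
    AnthropicCreateResponse,
    AnthropicListAgentsResponse,
    OpenAICreateResponse,
    OpenAIRetrieveAgentsResponse,
)
```
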
diff --git a/src/gradientai/types/model_providers/openai/key_create_params.py b/src/gradientai/types/models/providers/anthropic_create_params.py
similarity index 68%
rename from src/gradientai/types/model_providers/openai/key_create_params.py
rename to src/gradientai/types/models/providers/anthropic_create_params.py
index 389f167c..b624121f 100644
--- a/src/gradientai/types/model_providers/openai/key_create_params.py
+++ b/src/gradientai/types/models/providers/anthropic_create_params.py
@@ -4,10 +4,10 @@
from typing_extensions import TypedDict
-__all__ = ["KeyCreateParams"]
+__all__ = ["AnthropicCreateParams"]
-class KeyCreateParams(TypedDict, total=False):
+class AnthropicCreateParams(TypedDict, total=False):
api_key: str
name: str
diff --git a/src/gradientai/types/model_providers/anthropic/key_update_response.py b/src/gradientai/types/models/providers/anthropic_create_response.py
similarity index 77%
rename from src/gradientai/types/model_providers/anthropic/key_update_response.py
rename to src/gradientai/types/models/providers/anthropic_create_response.py
index b04277a6..f0b8d2d1 100644
--- a/src/gradientai/types/model_providers/anthropic/key_update_response.py
+++ b/src/gradientai/types/models/providers/anthropic_create_response.py
@@ -5,8 +5,8 @@
from ...._models import BaseModel
from ...api_anthropic_api_key_info import APIAnthropicAPIKeyInfo
-__all__ = ["KeyUpdateResponse"]
+__all__ = ["AnthropicCreateResponse"]
-class KeyUpdateResponse(BaseModel):
+class AnthropicCreateResponse(BaseModel):
api_key_info: Optional[APIAnthropicAPIKeyInfo] = None
diff --git a/src/gradientai/types/model_providers/anthropic/key_create_response.py b/src/gradientai/types/models/providers/anthropic_delete_response.py
similarity index 77%
rename from src/gradientai/types/model_providers/anthropic/key_create_response.py
rename to src/gradientai/types/models/providers/anthropic_delete_response.py
index a032810c..a3842bbc 100644
--- a/src/gradientai/types/model_providers/anthropic/key_create_response.py
+++ b/src/gradientai/types/models/providers/anthropic_delete_response.py
@@ -5,8 +5,8 @@
from ...._models import BaseModel
from ...api_anthropic_api_key_info import APIAnthropicAPIKeyInfo
-__all__ = ["KeyCreateResponse"]
+__all__ = ["AnthropicDeleteResponse"]
-class KeyCreateResponse(BaseModel):
+class AnthropicDeleteResponse(BaseModel):
api_key_info: Optional[APIAnthropicAPIKeyInfo] = None
diff --git a/src/gradientai/types/model_providers/openai/key_retrieve_agents_params.py b/src/gradientai/types/models/providers/anthropic_list_agents_params.py
similarity index 71%
rename from src/gradientai/types/model_providers/openai/key_retrieve_agents_params.py
rename to src/gradientai/types/models/providers/anthropic_list_agents_params.py
index ec745d14..1a5b8229 100644
--- a/src/gradientai/types/model_providers/openai/key_retrieve_agents_params.py
+++ b/src/gradientai/types/models/providers/anthropic_list_agents_params.py
@@ -4,10 +4,10 @@
from typing_extensions import TypedDict
-__all__ = ["KeyRetrieveAgentsParams"]
+__all__ = ["AnthropicListAgentsParams"]
-class KeyRetrieveAgentsParams(TypedDict, total=False):
+class AnthropicListAgentsParams(TypedDict, total=False):
page: int
"""page number."""
diff --git a/src/gradientai/types/model_providers/anthropic/key_list_agents_response.py b/src/gradientai/types/models/providers/anthropic_list_agents_response.py
similarity index 83%
rename from src/gradientai/types/model_providers/anthropic/key_list_agents_response.py
rename to src/gradientai/types/models/providers/anthropic_list_agents_response.py
index c9e74cf7..6816f0db 100644
--- a/src/gradientai/types/model_providers/anthropic/key_list_agents_response.py
+++ b/src/gradientai/types/models/providers/anthropic_list_agents_response.py
@@ -8,10 +8,10 @@
from ...shared.api_meta import APIMeta
from ...shared.api_links import APILinks
-__all__ = ["KeyListAgentsResponse"]
+__all__ = ["AnthropicListAgentsResponse"]
-class KeyListAgentsResponse(BaseModel):
+class AnthropicListAgentsResponse(BaseModel):
agents: Optional[List["APIAgent"]] = None
links: Optional[APILinks] = None
diff --git a/src/gradientai/types/model_providers/anthropic/key_list_params.py b/src/gradientai/types/models/providers/anthropic_list_params.py
similarity index 74%
rename from src/gradientai/types/model_providers/anthropic/key_list_params.py
rename to src/gradientai/types/models/providers/anthropic_list_params.py
index a11458ad..de8ce520 100644
--- a/src/gradientai/types/model_providers/anthropic/key_list_params.py
+++ b/src/gradientai/types/models/providers/anthropic_list_params.py
@@ -4,10 +4,10 @@
from typing_extensions import TypedDict
-__all__ = ["KeyListParams"]
+__all__ = ["AnthropicListParams"]
-class KeyListParams(TypedDict, total=False):
+class AnthropicListParams(TypedDict, total=False):
page: int
"""page number."""
diff --git a/src/gradientai/types/model_providers/anthropic/key_list_response.py b/src/gradientai/types/models/providers/anthropic_list_response.py
similarity index 85%
rename from src/gradientai/types/model_providers/anthropic/key_list_response.py
rename to src/gradientai/types/models/providers/anthropic_list_response.py
index e3e3e5ef..77999f5b 100644
--- a/src/gradientai/types/model_providers/anthropic/key_list_response.py
+++ b/src/gradientai/types/models/providers/anthropic_list_response.py
@@ -7,10 +7,10 @@
from ...shared.api_links import APILinks
from ...api_anthropic_api_key_info import APIAnthropicAPIKeyInfo
-__all__ = ["KeyListResponse"]
+__all__ = ["AnthropicListResponse"]
-class KeyListResponse(BaseModel):
+class AnthropicListResponse(BaseModel):
api_key_infos: Optional[List[APIAnthropicAPIKeyInfo]] = None
links: Optional[APILinks] = None
diff --git a/src/gradientai/types/model_providers/anthropic/key_retrieve_response.py b/src/gradientai/types/models/providers/anthropic_retrieve_response.py
similarity index 76%
rename from src/gradientai/types/model_providers/anthropic/key_retrieve_response.py
rename to src/gradientai/types/models/providers/anthropic_retrieve_response.py
index b8361fc2..7083b75f 100644
--- a/src/gradientai/types/model_providers/anthropic/key_retrieve_response.py
+++ b/src/gradientai/types/models/providers/anthropic_retrieve_response.py
@@ -5,8 +5,8 @@
from ...._models import BaseModel
from ...api_anthropic_api_key_info import APIAnthropicAPIKeyInfo
-__all__ = ["KeyRetrieveResponse"]
+__all__ = ["AnthropicRetrieveResponse"]
-class KeyRetrieveResponse(BaseModel):
+class AnthropicRetrieveResponse(BaseModel):
api_key_info: Optional[APIAnthropicAPIKeyInfo] = None
diff --git a/src/gradientai/types/model_providers/openai/key_update_params.py b/src/gradientai/types/models/providers/anthropic_update_params.py
similarity index 78%
rename from src/gradientai/types/model_providers/openai/key_update_params.py
rename to src/gradientai/types/models/providers/anthropic_update_params.py
index c07d7f66..7bb03045 100644
--- a/src/gradientai/types/model_providers/openai/key_update_params.py
+++ b/src/gradientai/types/models/providers/anthropic_update_params.py
@@ -6,10 +6,10 @@
from ...._utils import PropertyInfo
-__all__ = ["KeyUpdateParams"]
+__all__ = ["AnthropicUpdateParams"]
-class KeyUpdateParams(TypedDict, total=False):
+class AnthropicUpdateParams(TypedDict, total=False):
api_key: str
body_api_key_uuid: Annotated[str, PropertyInfo(alias="api_key_uuid")]
diff --git a/src/gradientai/types/model_providers/anthropic/key_delete_response.py b/src/gradientai/types/models/providers/anthropic_update_response.py
similarity index 77%
rename from src/gradientai/types/model_providers/anthropic/key_delete_response.py
rename to src/gradientai/types/models/providers/anthropic_update_response.py
index 2afe2dda..d3b2911b 100644
--- a/src/gradientai/types/model_providers/anthropic/key_delete_response.py
+++ b/src/gradientai/types/models/providers/anthropic_update_response.py
@@ -5,8 +5,8 @@
from ...._models import BaseModel
from ...api_anthropic_api_key_info import APIAnthropicAPIKeyInfo
-__all__ = ["KeyDeleteResponse"]
+__all__ = ["AnthropicUpdateResponse"]
-class KeyDeleteResponse(BaseModel):
+class AnthropicUpdateResponse(BaseModel):
api_key_info: Optional[APIAnthropicAPIKeyInfo] = None
diff --git a/src/gradientai/types/model_providers/anthropic/key_create_params.py b/src/gradientai/types/models/providers/openai_create_params.py
similarity index 70%
rename from src/gradientai/types/model_providers/anthropic/key_create_params.py
rename to src/gradientai/types/models/providers/openai_create_params.py
index 389f167c..da655d75 100644
--- a/src/gradientai/types/model_providers/anthropic/key_create_params.py
+++ b/src/gradientai/types/models/providers/openai_create_params.py
@@ -4,10 +4,10 @@
from typing_extensions import TypedDict
-__all__ = ["KeyCreateParams"]
+__all__ = ["OpenAICreateParams"]
-class KeyCreateParams(TypedDict, total=False):
+class OpenAICreateParams(TypedDict, total=False):
api_key: str
name: str
diff --git a/src/gradientai/types/model_providers/openai/key_create_response.py b/src/gradientai/types/models/providers/openai_create_response.py
similarity index 78%
rename from src/gradientai/types/model_providers/openai/key_create_response.py
rename to src/gradientai/types/models/providers/openai_create_response.py
index f3b4d36c..4908a91a 100644
--- a/src/gradientai/types/model_providers/openai/key_create_response.py
+++ b/src/gradientai/types/models/providers/openai_create_response.py
@@ -5,8 +5,8 @@
from ...._models import BaseModel
from ...api_openai_api_key_info import APIOpenAIAPIKeyInfo
-__all__ = ["KeyCreateResponse"]
+__all__ = ["OpenAICreateResponse"]
-class KeyCreateResponse(BaseModel):
+class OpenAICreateResponse(BaseModel):
api_key_info: Optional[APIOpenAIAPIKeyInfo] = None
diff --git a/src/gradientai/types/model_providers/openai/key_delete_response.py b/src/gradientai/types/models/providers/openai_delete_response.py
similarity index 78%
rename from src/gradientai/types/model_providers/openai/key_delete_response.py
rename to src/gradientai/types/models/providers/openai_delete_response.py
index 0c8922bb..080a251f 100644
--- a/src/gradientai/types/model_providers/openai/key_delete_response.py
+++ b/src/gradientai/types/models/providers/openai_delete_response.py
@@ -5,8 +5,8 @@
from ...._models import BaseModel
from ...api_openai_api_key_info import APIOpenAIAPIKeyInfo
-__all__ = ["KeyDeleteResponse"]
+__all__ = ["OpenAIDeleteResponse"]
-class KeyDeleteResponse(BaseModel):
+class OpenAIDeleteResponse(BaseModel):
api_key_info: Optional[APIOpenAIAPIKeyInfo] = None
diff --git a/src/gradientai/types/model_providers/openai/key_list_params.py b/src/gradientai/types/models/providers/openai_list_params.py
similarity index 75%
rename from src/gradientai/types/model_providers/openai/key_list_params.py
rename to src/gradientai/types/models/providers/openai_list_params.py
index a11458ad..e5b86b8d 100644
--- a/src/gradientai/types/model_providers/openai/key_list_params.py
+++ b/src/gradientai/types/models/providers/openai_list_params.py
@@ -4,10 +4,10 @@
from typing_extensions import TypedDict
-__all__ = ["KeyListParams"]
+__all__ = ["OpenAIListParams"]
-class KeyListParams(TypedDict, total=False):
+class OpenAIListParams(TypedDict, total=False):
page: int
"""page number."""
diff --git a/src/gradientai/types/model_providers/openai/key_list_response.py b/src/gradientai/types/models/providers/openai_list_response.py
similarity index 86%
rename from src/gradientai/types/model_providers/openai/key_list_response.py
rename to src/gradientai/types/models/providers/openai_list_response.py
index 362b5dd6..edbd9fb4 100644
--- a/src/gradientai/types/model_providers/openai/key_list_response.py
+++ b/src/gradientai/types/models/providers/openai_list_response.py
@@ -7,10 +7,10 @@
from ...shared.api_links import APILinks
from ...api_openai_api_key_info import APIOpenAIAPIKeyInfo
-__all__ = ["KeyListResponse"]
+__all__ = ["OpenAIListResponse"]
-class KeyListResponse(BaseModel):
+class OpenAIListResponse(BaseModel):
api_key_infos: Optional[List[APIOpenAIAPIKeyInfo]] = None
links: Optional[APILinks] = None
diff --git a/src/gradientai/types/model_providers/anthropic/key_list_agents_params.py b/src/gradientai/types/models/providers/openai_retrieve_agents_params.py
similarity index 71%
rename from src/gradientai/types/model_providers/anthropic/key_list_agents_params.py
rename to src/gradientai/types/models/providers/openai_retrieve_agents_params.py
index ebbc3b7e..8a41eaf9 100644
--- a/src/gradientai/types/model_providers/anthropic/key_list_agents_params.py
+++ b/src/gradientai/types/models/providers/openai_retrieve_agents_params.py
@@ -4,10 +4,10 @@
from typing_extensions import TypedDict
-__all__ = ["KeyListAgentsParams"]
+__all__ = ["OpenAIRetrieveAgentsParams"]
-class KeyListAgentsParams(TypedDict, total=False):
+class OpenAIRetrieveAgentsParams(TypedDict, total=False):
page: int
"""page number."""
diff --git a/src/gradientai/types/model_providers/openai/key_retrieve_agents_response.py b/src/gradientai/types/models/providers/openai_retrieve_agents_response.py
similarity index 82%
rename from src/gradientai/types/model_providers/openai/key_retrieve_agents_response.py
rename to src/gradientai/types/models/providers/openai_retrieve_agents_response.py
index 56808bac..b3166636 100644
--- a/src/gradientai/types/model_providers/openai/key_retrieve_agents_response.py
+++ b/src/gradientai/types/models/providers/openai_retrieve_agents_response.py
@@ -8,10 +8,10 @@
from ...shared.api_meta import APIMeta
from ...shared.api_links import APILinks
-__all__ = ["KeyRetrieveAgentsResponse"]
+__all__ = ["OpenAIRetrieveAgentsResponse"]
-class KeyRetrieveAgentsResponse(BaseModel):
+class OpenAIRetrieveAgentsResponse(BaseModel):
agents: Optional[List["APIAgent"]] = None
links: Optional[APILinks] = None
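
A hedged sketch of fetching the agents attached to an OpenAI key. The method name follows the `retrieve_agents` wrappers earlier in this diff; `per_page` is assumed to mirror the anthropic `list_agents` params:

```python
# Sketch only: uuid identifies the provider key; values are placeholders.
from gradientai import GradientAI

client = GradientAI()

resp = client.models.providers.openai.retrieve_agents(uuid="uuid", page=1, per_page=10)
for agent in resp.agents or []:
    print(agent)
```
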
diff --git a/src/gradientai/types/model_providers/openai/key_update_response.py b/src/gradientai/types/models/providers/openai_retrieve_response.py
similarity index 77%
rename from src/gradientai/types/model_providers/openai/key_update_response.py
rename to src/gradientai/types/models/providers/openai_retrieve_response.py
index 4889f994..ef23b966 100644
--- a/src/gradientai/types/model_providers/openai/key_update_response.py
+++ b/src/gradientai/types/models/providers/openai_retrieve_response.py
@@ -5,8 +5,8 @@
from ...._models import BaseModel
from ...api_openai_api_key_info import APIOpenAIAPIKeyInfo
-__all__ = ["KeyUpdateResponse"]
+__all__ = ["OpenAIRetrieveResponse"]
-class KeyUpdateResponse(BaseModel):
+class OpenAIRetrieveResponse(BaseModel):
api_key_info: Optional[APIOpenAIAPIKeyInfo] = None
diff --git a/src/gradientai/types/model_providers/anthropic/key_update_params.py b/src/gradientai/types/models/providers/openai_update_params.py
similarity index 79%
rename from src/gradientai/types/model_providers/anthropic/key_update_params.py
rename to src/gradientai/types/models/providers/openai_update_params.py
index c07d7f66..ab5d02cf 100644
--- a/src/gradientai/types/model_providers/anthropic/key_update_params.py
+++ b/src/gradientai/types/models/providers/openai_update_params.py
@@ -6,10 +6,10 @@
from ...._utils import PropertyInfo
-__all__ = ["KeyUpdateParams"]
+__all__ = ["OpenAIUpdateParams"]
-class KeyUpdateParams(TypedDict, total=False):
+class OpenAIUpdateParams(TypedDict, total=False):
api_key: str
body_api_key_uuid: Annotated[str, PropertyInfo(alias="api_key_uuid")]
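
An update call carries two UUIDs: `path_api_key_uuid` addresses the key in the URL, while `body_api_key_uuid` is serialized as `api_key_uuid` in the request body via the `PropertyInfo` alias above. A sketch of the kwarg shape, mirroring the anthropic update tests in this diff (values are placeholders):

```python
# Sketch only: the alias means the wire body contains "api_key_uuid".
from gradientai import GradientAI

client = GradientAI()

updated = client.models.providers.openai.update(
    path_api_key_uuid="api_key_uuid",  # URL path segment
    api_key="api_key",
    body_api_key_uuid="api_key_uuid",  # sent as "api_key_uuid" in the JSON body
    name="name",
)
```
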
diff --git a/src/gradientai/types/model_providers/openai/key_retrieve_response.py b/src/gradientai/types/models/providers/openai_update_response.py
similarity index 78%
rename from src/gradientai/types/model_providers/openai/key_retrieve_response.py
rename to src/gradientai/types/models/providers/openai_update_response.py
index 7015b6f7..9bb80518 100644
--- a/src/gradientai/types/model_providers/openai/key_retrieve_response.py
+++ b/src/gradientai/types/models/providers/openai_update_response.py
@@ -5,8 +5,8 @@
from ...._models import BaseModel
from ...api_openai_api_key_info import APIOpenAIAPIKeyInfo
-__all__ = ["KeyRetrieveResponse"]
+__all__ = ["OpenAIUpdateResponse"]
-class KeyRetrieveResponse(BaseModel):
+class OpenAIUpdateResponse(BaseModel):
api_key_info: Optional[APIOpenAIAPIKeyInfo] = None
diff --git a/tests/api_resources/model_providers/openai/__init__.py b/tests/api_resources/model_providers/openai/__init__.py
deleted file mode 100644
index fd8019a9..00000000
--- a/tests/api_resources/model_providers/openai/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
diff --git a/tests/api_resources/model_providers/__init__.py b/tests/api_resources/models/__init__.py
similarity index 100%
rename from tests/api_resources/model_providers/__init__.py
rename to tests/api_resources/models/__init__.py
diff --git a/tests/api_resources/model_providers/anthropic/__init__.py b/tests/api_resources/models/providers/__init__.py
similarity index 100%
rename from tests/api_resources/model_providers/anthropic/__init__.py
rename to tests/api_resources/models/providers/__init__.py
diff --git a/tests/api_resources/model_providers/anthropic/test_keys.py b/tests/api_resources/models/providers/test_anthropic.py
similarity index 59%
rename from tests/api_resources/model_providers/anthropic/test_keys.py
rename to tests/api_resources/models/providers/test_anthropic.py
index fd4ffb0f..79bfcdc3 100644
--- a/tests/api_resources/model_providers/anthropic/test_keys.py
+++ b/tests/api_resources/models/providers/test_anthropic.py
@@ -9,89 +9,89 @@
from gradientai import GradientAI, AsyncGradientAI
from tests.utils import assert_matches_type
-from gradientai.types.model_providers.anthropic import (
- KeyListResponse,
- KeyCreateResponse,
- KeyDeleteResponse,
- KeyUpdateResponse,
- KeyRetrieveResponse,
- KeyListAgentsResponse,
+from gradientai.types.models.providers import (
+ AnthropicListResponse,
+ AnthropicCreateResponse,
+ AnthropicDeleteResponse,
+ AnthropicUpdateResponse,
+ AnthropicRetrieveResponse,
+ AnthropicListAgentsResponse,
)
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-class TestKeys:
+class TestAnthropic:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
@pytest.mark.skip()
@parametrize
def test_method_create(self, client: GradientAI) -> None:
- key = client.model_providers.anthropic.keys.create()
- assert_matches_type(KeyCreateResponse, key, path=["response"])
+ anthropic = client.models.providers.anthropic.create()
+ assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_create_with_all_params(self, client: GradientAI) -> None:
- key = client.model_providers.anthropic.keys.create(
+ anthropic = client.models.providers.anthropic.create(
api_key="api_key",
name="name",
)
- assert_matches_type(KeyCreateResponse, key, path=["response"])
+ assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_create(self, client: GradientAI) -> None:
- response = client.model_providers.anthropic.keys.with_raw_response.create()
+ response = client.models.providers.anthropic.with_raw_response.create()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyCreateResponse, key, path=["response"])
+ anthropic = response.parse()
+ assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_create(self, client: GradientAI) -> None:
- with client.model_providers.anthropic.keys.with_streaming_response.create() as response:
+ with client.models.providers.anthropic.with_streaming_response.create() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyCreateResponse, key, path=["response"])
+ anthropic = response.parse()
+ assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
def test_method_retrieve(self, client: GradientAI) -> None:
- key = client.model_providers.anthropic.keys.retrieve(
+ anthropic = client.models.providers.anthropic.retrieve(
"api_key_uuid",
)
- assert_matches_type(KeyRetrieveResponse, key, path=["response"])
+ assert_matches_type(AnthropicRetrieveResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_retrieve(self, client: GradientAI) -> None:
- response = client.model_providers.anthropic.keys.with_raw_response.retrieve(
+ response = client.models.providers.anthropic.with_raw_response.retrieve(
"api_key_uuid",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyRetrieveResponse, key, path=["response"])
+ anthropic = response.parse()
+ assert_matches_type(AnthropicRetrieveResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_retrieve(self, client: GradientAI) -> None:
- with client.model_providers.anthropic.keys.with_streaming_response.retrieve(
+ with client.models.providers.anthropic.with_streaming_response.retrieve(
"api_key_uuid",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyRetrieveResponse, key, path=["response"])
+ anthropic = response.parse()
+ assert_matches_type(AnthropicRetrieveResponse, anthropic, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -99,52 +99,52 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None:
@parametrize
def test_path_params_retrieve(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
- client.model_providers.anthropic.keys.with_raw_response.retrieve(
+ client.models.providers.anthropic.with_raw_response.retrieve(
"",
)
@pytest.mark.skip()
@parametrize
def test_method_update(self, client: GradientAI) -> None:
- key = client.model_providers.anthropic.keys.update(
+ anthropic = client.models.providers.anthropic.update(
path_api_key_uuid="api_key_uuid",
)
- assert_matches_type(KeyUpdateResponse, key, path=["response"])
+ assert_matches_type(AnthropicUpdateResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_update_with_all_params(self, client: GradientAI) -> None:
- key = client.model_providers.anthropic.keys.update(
+ anthropic = client.models.providers.anthropic.update(
path_api_key_uuid="api_key_uuid",
api_key="api_key",
body_api_key_uuid="api_key_uuid",
name="name",
)
- assert_matches_type(KeyUpdateResponse, key, path=["response"])
+ assert_matches_type(AnthropicUpdateResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_update(self, client: GradientAI) -> None:
- response = client.model_providers.anthropic.keys.with_raw_response.update(
+ response = client.models.providers.anthropic.with_raw_response.update(
path_api_key_uuid="api_key_uuid",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyUpdateResponse, key, path=["response"])
+ anthropic = response.parse()
+ assert_matches_type(AnthropicUpdateResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_update(self, client: GradientAI) -> None:
- with client.model_providers.anthropic.keys.with_streaming_response.update(
+ with client.models.providers.anthropic.with_streaming_response.update(
path_api_key_uuid="api_key_uuid",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyUpdateResponse, key, path=["response"])
+ anthropic = response.parse()
+ assert_matches_type(AnthropicUpdateResponse, anthropic, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -152,78 +152,78 @@ def test_streaming_response_update(self, client: GradientAI) -> None:
@parametrize
def test_path_params_update(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"):
- client.model_providers.anthropic.keys.with_raw_response.update(
+ client.models.providers.anthropic.with_raw_response.update(
path_api_key_uuid="",
)
@pytest.mark.skip()
@parametrize
def test_method_list(self, client: GradientAI) -> None:
- key = client.model_providers.anthropic.keys.list()
- assert_matches_type(KeyListResponse, key, path=["response"])
+ anthropic = client.models.providers.anthropic.list()
+ assert_matches_type(AnthropicListResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_list_with_all_params(self, client: GradientAI) -> None:
- key = client.model_providers.anthropic.keys.list(
+ anthropic = client.models.providers.anthropic.list(
page=0,
per_page=0,
)
- assert_matches_type(KeyListResponse, key, path=["response"])
+ assert_matches_type(AnthropicListResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_list(self, client: GradientAI) -> None:
- response = client.model_providers.anthropic.keys.with_raw_response.list()
+ response = client.models.providers.anthropic.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyListResponse, key, path=["response"])
+ anthropic = response.parse()
+ assert_matches_type(AnthropicListResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_list(self, client: GradientAI) -> None:
- with client.model_providers.anthropic.keys.with_streaming_response.list() as response:
+ with client.models.providers.anthropic.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyListResponse, key, path=["response"])
+ anthropic = response.parse()
+ assert_matches_type(AnthropicListResponse, anthropic, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
def test_method_delete(self, client: GradientAI) -> None:
- key = client.model_providers.anthropic.keys.delete(
+ anthropic = client.models.providers.anthropic.delete(
"api_key_uuid",
)
- assert_matches_type(KeyDeleteResponse, key, path=["response"])
+ assert_matches_type(AnthropicDeleteResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_delete(self, client: GradientAI) -> None:
- response = client.model_providers.anthropic.keys.with_raw_response.delete(
+ response = client.models.providers.anthropic.with_raw_response.delete(
"api_key_uuid",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyDeleteResponse, key, path=["response"])
+ anthropic = response.parse()
+ assert_matches_type(AnthropicDeleteResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_delete(self, client: GradientAI) -> None:
- with client.model_providers.anthropic.keys.with_streaming_response.delete(
+ with client.models.providers.anthropic.with_streaming_response.delete(
"api_key_uuid",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyDeleteResponse, key, path=["response"])
+ anthropic = response.parse()
+ assert_matches_type(AnthropicDeleteResponse, anthropic, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -231,51 +231,51 @@ def test_streaming_response_delete(self, client: GradientAI) -> None:
@parametrize
def test_path_params_delete(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
- client.model_providers.anthropic.keys.with_raw_response.delete(
+ client.models.providers.anthropic.with_raw_response.delete(
"",
)
@pytest.mark.skip()
@parametrize
def test_method_list_agents(self, client: GradientAI) -> None:
- key = client.model_providers.anthropic.keys.list_agents(
+ anthropic = client.models.providers.anthropic.list_agents(
uuid="uuid",
)
- assert_matches_type(KeyListAgentsResponse, key, path=["response"])
+ assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_list_agents_with_all_params(self, client: GradientAI) -> None:
- key = client.model_providers.anthropic.keys.list_agents(
+ anthropic = client.models.providers.anthropic.list_agents(
uuid="uuid",
page=0,
per_page=0,
)
- assert_matches_type(KeyListAgentsResponse, key, path=["response"])
+ assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_list_agents(self, client: GradientAI) -> None:
- response = client.model_providers.anthropic.keys.with_raw_response.list_agents(
+ response = client.models.providers.anthropic.with_raw_response.list_agents(
uuid="uuid",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyListAgentsResponse, key, path=["response"])
+ anthropic = response.parse()
+ assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_list_agents(self, client: GradientAI) -> None:
- with client.model_providers.anthropic.keys.with_streaming_response.list_agents(
+ with client.models.providers.anthropic.with_streaming_response.list_agents(
uuid="uuid",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyListAgentsResponse, key, path=["response"])
+ anthropic = response.parse()
+ assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -283,12 +283,12 @@ def test_streaming_response_list_agents(self, client: GradientAI) -> None:
@parametrize
def test_path_params_list_agents(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
- client.model_providers.anthropic.keys.with_raw_response.list_agents(
+ client.models.providers.anthropic.with_raw_response.list_agents(
uuid="",
)
-class TestAsyncKeys:
+class TestAsyncAnthropic:
parametrize = pytest.mark.parametrize(
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
@@ -296,71 +296,71 @@ class TestAsyncKeys:
@pytest.mark.skip()
@parametrize
async def test_method_create(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.anthropic.keys.create()
- assert_matches_type(KeyCreateResponse, key, path=["response"])
+ anthropic = await async_client.models.providers.anthropic.create()
+ assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.anthropic.keys.create(
+ anthropic = await async_client.models.providers.anthropic.create(
api_key="api_key",
name="name",
)
- assert_matches_type(KeyCreateResponse, key, path=["response"])
+ assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.model_providers.anthropic.keys.with_raw_response.create()
+ response = await async_client.models.providers.anthropic.with_raw_response.create()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyCreateResponse, key, path=["response"])
+ anthropic = await response.parse()
+ assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None:
- async with async_client.model_providers.anthropic.keys.with_streaming_response.create() as response:
+ async with async_client.models.providers.anthropic.with_streaming_response.create() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyCreateResponse, key, path=["response"])
+ anthropic = await response.parse()
+ assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.anthropic.keys.retrieve(
+ anthropic = await async_client.models.providers.anthropic.retrieve(
"api_key_uuid",
)
- assert_matches_type(KeyRetrieveResponse, key, path=["response"])
+ assert_matches_type(AnthropicRetrieveResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.model_providers.anthropic.keys.with_raw_response.retrieve(
+ response = await async_client.models.providers.anthropic.with_raw_response.retrieve(
"api_key_uuid",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyRetrieveResponse, key, path=["response"])
+ anthropic = await response.parse()
+ assert_matches_type(AnthropicRetrieveResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- async with async_client.model_providers.anthropic.keys.with_streaming_response.retrieve(
+ async with async_client.models.providers.anthropic.with_streaming_response.retrieve(
"api_key_uuid",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyRetrieveResponse, key, path=["response"])
+ anthropic = await response.parse()
+ assert_matches_type(AnthropicRetrieveResponse, anthropic, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -368,52 +368,52 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI)
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
- await async_client.model_providers.anthropic.keys.with_raw_response.retrieve(
+ await async_client.models.providers.anthropic.with_raw_response.retrieve(
"",
)
@pytest.mark.skip()
@parametrize
async def test_method_update(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.anthropic.keys.update(
+ anthropic = await async_client.models.providers.anthropic.update(
path_api_key_uuid="api_key_uuid",
)
- assert_matches_type(KeyUpdateResponse, key, path=["response"])
+ assert_matches_type(AnthropicUpdateResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.anthropic.keys.update(
+ anthropic = await async_client.models.providers.anthropic.update(
path_api_key_uuid="api_key_uuid",
api_key="api_key",
body_api_key_uuid="api_key_uuid",
name="name",
)
- assert_matches_type(KeyUpdateResponse, key, path=["response"])
+ assert_matches_type(AnthropicUpdateResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.model_providers.anthropic.keys.with_raw_response.update(
+ response = await async_client.models.providers.anthropic.with_raw_response.update(
path_api_key_uuid="api_key_uuid",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyUpdateResponse, key, path=["response"])
+ anthropic = await response.parse()
+ assert_matches_type(AnthropicUpdateResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None:
- async with async_client.model_providers.anthropic.keys.with_streaming_response.update(
+ async with async_client.models.providers.anthropic.with_streaming_response.update(
path_api_key_uuid="api_key_uuid",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyUpdateResponse, key, path=["response"])
+ anthropic = await response.parse()
+ assert_matches_type(AnthropicUpdateResponse, anthropic, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -421,78 +421,78 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) ->
@parametrize
async def test_path_params_update(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"):
- await async_client.model_providers.anthropic.keys.with_raw_response.update(
+ await async_client.models.providers.anthropic.with_raw_response.update(
path_api_key_uuid="",
)
@pytest.mark.skip()
@parametrize
async def test_method_list(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.anthropic.keys.list()
- assert_matches_type(KeyListResponse, key, path=["response"])
+ anthropic = await async_client.models.providers.anthropic.list()
+ assert_matches_type(AnthropicListResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.anthropic.keys.list(
+ anthropic = await async_client.models.providers.anthropic.list(
page=0,
per_page=0,
)
- assert_matches_type(KeyListResponse, key, path=["response"])
+ assert_matches_type(AnthropicListResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.model_providers.anthropic.keys.with_raw_response.list()
+ response = await async_client.models.providers.anthropic.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyListResponse, key, path=["response"])
+ anthropic = await response.parse()
+ assert_matches_type(AnthropicListResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None:
- async with async_client.model_providers.anthropic.keys.with_streaming_response.list() as response:
+ async with async_client.models.providers.anthropic.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyListResponse, key, path=["response"])
+ anthropic = await response.parse()
+ assert_matches_type(AnthropicListResponse, anthropic, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.anthropic.keys.delete(
+ anthropic = await async_client.models.providers.anthropic.delete(
"api_key_uuid",
)
- assert_matches_type(KeyDeleteResponse, key, path=["response"])
+ assert_matches_type(AnthropicDeleteResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.model_providers.anthropic.keys.with_raw_response.delete(
+ response = await async_client.models.providers.anthropic.with_raw_response.delete(
"api_key_uuid",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyDeleteResponse, key, path=["response"])
+ anthropic = await response.parse()
+ assert_matches_type(AnthropicDeleteResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None:
- async with async_client.model_providers.anthropic.keys.with_streaming_response.delete(
+ async with async_client.models.providers.anthropic.with_streaming_response.delete(
"api_key_uuid",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyDeleteResponse, key, path=["response"])
+ anthropic = await response.parse()
+ assert_matches_type(AnthropicDeleteResponse, anthropic, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -500,51 +500,51 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) ->
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
- await async_client.model_providers.anthropic.keys.with_raw_response.delete(
+ await async_client.models.providers.anthropic.with_raw_response.delete(
"",
)
@pytest.mark.skip()
@parametrize
async def test_method_list_agents(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.anthropic.keys.list_agents(
+ anthropic = await async_client.models.providers.anthropic.list_agents(
uuid="uuid",
)
- assert_matches_type(KeyListAgentsResponse, key, path=["response"])
+ assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_list_agents_with_all_params(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.anthropic.keys.list_agents(
+ anthropic = await async_client.models.providers.anthropic.list_agents(
uuid="uuid",
page=0,
per_page=0,
)
- assert_matches_type(KeyListAgentsResponse, key, path=["response"])
+ assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_list_agents(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.model_providers.anthropic.keys.with_raw_response.list_agents(
+ response = await async_client.models.providers.anthropic.with_raw_response.list_agents(
uuid="uuid",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyListAgentsResponse, key, path=["response"])
+ anthropic = await response.parse()
+ assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list_agents(self, async_client: AsyncGradientAI) -> None:
- async with async_client.model_providers.anthropic.keys.with_streaming_response.list_agents(
+ async with async_client.models.providers.anthropic.with_streaming_response.list_agents(
uuid="uuid",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyListAgentsResponse, key, path=["response"])
+ anthropic = await response.parse()
+ assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -552,6 +552,6 @@ async def test_streaming_response_list_agents(self, async_client: AsyncGradientA
@parametrize
async def test_path_params_list_agents(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
- await async_client.model_providers.anthropic.keys.with_raw_response.list_agents(
+ await async_client.models.providers.anthropic.with_raw_response.list_agents(
uuid="",
)
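
An async sketch mirroring the `TestAsyncAnthropic` cases above: the same renamed path is exposed on `AsyncGradientAI`, with awaited calls:

```python
# Sketch only: async client usage under the new models.providers path.
import asyncio

from gradientai import AsyncGradientAI


async def main() -> None:
    client = AsyncGradientAI()
    anthropic = await client.models.providers.anthropic.list()
    print(anthropic.api_key_infos)


asyncio.run(main())
```
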
diff --git a/tests/api_resources/model_providers/openai/test_keys.py b/tests/api_resources/models/providers/test_openai.py
similarity index 62%
rename from tests/api_resources/model_providers/openai/test_keys.py
rename to tests/api_resources/models/providers/test_openai.py
index f0f1eda0..2640601e 100644
--- a/tests/api_resources/model_providers/openai/test_keys.py
+++ b/tests/api_resources/models/providers/test_openai.py
@@ -9,89 +9,89 @@
from gradientai import GradientAI, AsyncGradientAI
from tests.utils import assert_matches_type
-from gradientai.types.model_providers.openai import (
- KeyListResponse,
- KeyCreateResponse,
- KeyDeleteResponse,
- KeyUpdateResponse,
- KeyRetrieveResponse,
- KeyRetrieveAgentsResponse,
+from gradientai.types.models.providers import (
+ OpenAIListResponse,
+ OpenAICreateResponse,
+ OpenAIDeleteResponse,
+ OpenAIUpdateResponse,
+ OpenAIRetrieveResponse,
+ OpenAIRetrieveAgentsResponse,
)
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-class TestKeys:
+class TestOpenAI:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
@pytest.mark.skip()
@parametrize
def test_method_create(self, client: GradientAI) -> None:
- key = client.model_providers.openai.keys.create()
- assert_matches_type(KeyCreateResponse, key, path=["response"])
+ openai = client.models.providers.openai.create()
+ assert_matches_type(OpenAICreateResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_create_with_all_params(self, client: GradientAI) -> None:
- key = client.model_providers.openai.keys.create(
+ openai = client.models.providers.openai.create(
api_key="api_key",
name="name",
)
- assert_matches_type(KeyCreateResponse, key, path=["response"])
+ assert_matches_type(OpenAICreateResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_create(self, client: GradientAI) -> None:
- response = client.model_providers.openai.keys.with_raw_response.create()
+ response = client.models.providers.openai.with_raw_response.create()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyCreateResponse, key, path=["response"])
+ openai = response.parse()
+ assert_matches_type(OpenAICreateResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_create(self, client: GradientAI) -> None:
- with client.model_providers.openai.keys.with_streaming_response.create() as response:
+ with client.models.providers.openai.with_streaming_response.create() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyCreateResponse, key, path=["response"])
+ openai = response.parse()
+ assert_matches_type(OpenAICreateResponse, openai, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
def test_method_retrieve(self, client: GradientAI) -> None:
- key = client.model_providers.openai.keys.retrieve(
+ openai = client.models.providers.openai.retrieve(
"api_key_uuid",
)
- assert_matches_type(KeyRetrieveResponse, key, path=["response"])
+ assert_matches_type(OpenAIRetrieveResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_retrieve(self, client: GradientAI) -> None:
- response = client.model_providers.openai.keys.with_raw_response.retrieve(
+ response = client.models.providers.openai.with_raw_response.retrieve(
"api_key_uuid",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyRetrieveResponse, key, path=["response"])
+ openai = response.parse()
+ assert_matches_type(OpenAIRetrieveResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_retrieve(self, client: GradientAI) -> None:
- with client.model_providers.openai.keys.with_streaming_response.retrieve(
+ with client.models.providers.openai.with_streaming_response.retrieve(
"api_key_uuid",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyRetrieveResponse, key, path=["response"])
+ openai = response.parse()
+ assert_matches_type(OpenAIRetrieveResponse, openai, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -99,52 +99,52 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None:
@parametrize
def test_path_params_retrieve(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
- client.model_providers.openai.keys.with_raw_response.retrieve(
+ client.models.providers.openai.with_raw_response.retrieve(
"",
)
@pytest.mark.skip()
@parametrize
def test_method_update(self, client: GradientAI) -> None:
- key = client.model_providers.openai.keys.update(
+ openai = client.models.providers.openai.update(
path_api_key_uuid="api_key_uuid",
)
- assert_matches_type(KeyUpdateResponse, key, path=["response"])
+ assert_matches_type(OpenAIUpdateResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_update_with_all_params(self, client: GradientAI) -> None:
- key = client.model_providers.openai.keys.update(
+ openai = client.models.providers.openai.update(
path_api_key_uuid="api_key_uuid",
api_key="api_key",
body_api_key_uuid="api_key_uuid",
name="name",
)
- assert_matches_type(KeyUpdateResponse, key, path=["response"])
+ assert_matches_type(OpenAIUpdateResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_update(self, client: GradientAI) -> None:
- response = client.model_providers.openai.keys.with_raw_response.update(
+ response = client.models.providers.openai.with_raw_response.update(
path_api_key_uuid="api_key_uuid",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyUpdateResponse, key, path=["response"])
+ openai = response.parse()
+ assert_matches_type(OpenAIUpdateResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_update(self, client: GradientAI) -> None:
- with client.model_providers.openai.keys.with_streaming_response.update(
+ with client.models.providers.openai.with_streaming_response.update(
path_api_key_uuid="api_key_uuid",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyUpdateResponse, key, path=["response"])
+ openai = response.parse()
+ assert_matches_type(OpenAIUpdateResponse, openai, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -152,78 +152,78 @@ def test_streaming_response_update(self, client: GradientAI) -> None:
@parametrize
def test_path_params_update(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"):
- client.model_providers.openai.keys.with_raw_response.update(
+ client.models.providers.openai.with_raw_response.update(
path_api_key_uuid="",
)
@pytest.mark.skip()
@parametrize
def test_method_list(self, client: GradientAI) -> None:
- key = client.model_providers.openai.keys.list()
- assert_matches_type(KeyListResponse, key, path=["response"])
+ openai = client.models.providers.openai.list()
+ assert_matches_type(OpenAIListResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_list_with_all_params(self, client: GradientAI) -> None:
- key = client.model_providers.openai.keys.list(
+ openai = client.models.providers.openai.list(
page=0,
per_page=0,
)
- assert_matches_type(KeyListResponse, key, path=["response"])
+ assert_matches_type(OpenAIListResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_list(self, client: GradientAI) -> None:
- response = client.model_providers.openai.keys.with_raw_response.list()
+ response = client.models.providers.openai.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyListResponse, key, path=["response"])
+ openai = response.parse()
+ assert_matches_type(OpenAIListResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_list(self, client: GradientAI) -> None:
- with client.model_providers.openai.keys.with_streaming_response.list() as response:
+ with client.models.providers.openai.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyListResponse, key, path=["response"])
+ openai = response.parse()
+ assert_matches_type(OpenAIListResponse, openai, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
def test_method_delete(self, client: GradientAI) -> None:
- key = client.model_providers.openai.keys.delete(
+ openai = client.models.providers.openai.delete(
"api_key_uuid",
)
- assert_matches_type(KeyDeleteResponse, key, path=["response"])
+ assert_matches_type(OpenAIDeleteResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_delete(self, client: GradientAI) -> None:
- response = client.model_providers.openai.keys.with_raw_response.delete(
+ response = client.models.providers.openai.with_raw_response.delete(
"api_key_uuid",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyDeleteResponse, key, path=["response"])
+ openai = response.parse()
+ assert_matches_type(OpenAIDeleteResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_delete(self, client: GradientAI) -> None:
- with client.model_providers.openai.keys.with_streaming_response.delete(
+ with client.models.providers.openai.with_streaming_response.delete(
"api_key_uuid",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyDeleteResponse, key, path=["response"])
+ openai = response.parse()
+ assert_matches_type(OpenAIDeleteResponse, openai, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -231,51 +231,51 @@ def test_streaming_response_delete(self, client: GradientAI) -> None:
@parametrize
def test_path_params_delete(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
- client.model_providers.openai.keys.with_raw_response.delete(
+ client.models.providers.openai.with_raw_response.delete(
"",
)
@pytest.mark.skip()
@parametrize
def test_method_retrieve_agents(self, client: GradientAI) -> None:
- key = client.model_providers.openai.keys.retrieve_agents(
+ openai = client.models.providers.openai.retrieve_agents(
uuid="uuid",
)
- assert_matches_type(KeyRetrieveAgentsResponse, key, path=["response"])
+ assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_retrieve_agents_with_all_params(self, client: GradientAI) -> None:
- key = client.model_providers.openai.keys.retrieve_agents(
+ openai = client.models.providers.openai.retrieve_agents(
uuid="uuid",
page=0,
per_page=0,
)
- assert_matches_type(KeyRetrieveAgentsResponse, key, path=["response"])
+ assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_retrieve_agents(self, client: GradientAI) -> None:
- response = client.model_providers.openai.keys.with_raw_response.retrieve_agents(
+ response = client.models.providers.openai.with_raw_response.retrieve_agents(
uuid="uuid",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyRetrieveAgentsResponse, key, path=["response"])
+ openai = response.parse()
+ assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_retrieve_agents(self, client: GradientAI) -> None:
- with client.model_providers.openai.keys.with_streaming_response.retrieve_agents(
+ with client.models.providers.openai.with_streaming_response.retrieve_agents(
uuid="uuid",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = response.parse()
- assert_matches_type(KeyRetrieveAgentsResponse, key, path=["response"])
+ openai = response.parse()
+ assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -283,12 +283,12 @@ def test_streaming_response_retrieve_agents(self, client: GradientAI) -> None:
@parametrize
def test_path_params_retrieve_agents(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
- client.model_providers.openai.keys.with_raw_response.retrieve_agents(
+ client.models.providers.openai.with_raw_response.retrieve_agents(
uuid="",
)
-class TestAsyncKeys:
+class TestAsyncOpenAI:
parametrize = pytest.mark.parametrize(
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
@@ -296,71 +296,71 @@ class TestAsyncKeys:
@pytest.mark.skip()
@parametrize
async def test_method_create(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.openai.keys.create()
- assert_matches_type(KeyCreateResponse, key, path=["response"])
+ openai = await async_client.models.providers.openai.create()
+ assert_matches_type(OpenAICreateResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.openai.keys.create(
+ openai = await async_client.models.providers.openai.create(
api_key="api_key",
name="name",
)
- assert_matches_type(KeyCreateResponse, key, path=["response"])
+ assert_matches_type(OpenAICreateResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.model_providers.openai.keys.with_raw_response.create()
+ response = await async_client.models.providers.openai.with_raw_response.create()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyCreateResponse, key, path=["response"])
+ openai = await response.parse()
+ assert_matches_type(OpenAICreateResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None:
- async with async_client.model_providers.openai.keys.with_streaming_response.create() as response:
+ async with async_client.models.providers.openai.with_streaming_response.create() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyCreateResponse, key, path=["response"])
+ openai = await response.parse()
+ assert_matches_type(OpenAICreateResponse, openai, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.openai.keys.retrieve(
+ openai = await async_client.models.providers.openai.retrieve(
"api_key_uuid",
)
- assert_matches_type(KeyRetrieveResponse, key, path=["response"])
+ assert_matches_type(OpenAIRetrieveResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.model_providers.openai.keys.with_raw_response.retrieve(
+ response = await async_client.models.providers.openai.with_raw_response.retrieve(
"api_key_uuid",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyRetrieveResponse, key, path=["response"])
+ openai = await response.parse()
+ assert_matches_type(OpenAIRetrieveResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- async with async_client.model_providers.openai.keys.with_streaming_response.retrieve(
+ async with async_client.models.providers.openai.with_streaming_response.retrieve(
"api_key_uuid",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyRetrieveResponse, key, path=["response"])
+ openai = await response.parse()
+ assert_matches_type(OpenAIRetrieveResponse, openai, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -368,52 +368,52 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI)
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
- await async_client.model_providers.openai.keys.with_raw_response.retrieve(
+ await async_client.models.providers.openai.with_raw_response.retrieve(
"",
)
@pytest.mark.skip()
@parametrize
async def test_method_update(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.openai.keys.update(
+ openai = await async_client.models.providers.openai.update(
path_api_key_uuid="api_key_uuid",
)
- assert_matches_type(KeyUpdateResponse, key, path=["response"])
+ assert_matches_type(OpenAIUpdateResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.openai.keys.update(
+ openai = await async_client.models.providers.openai.update(
path_api_key_uuid="api_key_uuid",
api_key="api_key",
body_api_key_uuid="api_key_uuid",
name="name",
)
- assert_matches_type(KeyUpdateResponse, key, path=["response"])
+ assert_matches_type(OpenAIUpdateResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.model_providers.openai.keys.with_raw_response.update(
+ response = await async_client.models.providers.openai.with_raw_response.update(
path_api_key_uuid="api_key_uuid",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyUpdateResponse, key, path=["response"])
+ openai = await response.parse()
+ assert_matches_type(OpenAIUpdateResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None:
- async with async_client.model_providers.openai.keys.with_streaming_response.update(
+ async with async_client.models.providers.openai.with_streaming_response.update(
path_api_key_uuid="api_key_uuid",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyUpdateResponse, key, path=["response"])
+ openai = await response.parse()
+ assert_matches_type(OpenAIUpdateResponse, openai, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -421,78 +421,78 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) ->
@parametrize
async def test_path_params_update(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"):
- await async_client.model_providers.openai.keys.with_raw_response.update(
+ await async_client.models.providers.openai.with_raw_response.update(
path_api_key_uuid="",
)
@pytest.mark.skip()
@parametrize
async def test_method_list(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.openai.keys.list()
- assert_matches_type(KeyListResponse, key, path=["response"])
+ openai = await async_client.models.providers.openai.list()
+ assert_matches_type(OpenAIListResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.openai.keys.list(
+ openai = await async_client.models.providers.openai.list(
page=0,
per_page=0,
)
- assert_matches_type(KeyListResponse, key, path=["response"])
+ assert_matches_type(OpenAIListResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.model_providers.openai.keys.with_raw_response.list()
+ response = await async_client.models.providers.openai.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyListResponse, key, path=["response"])
+ openai = await response.parse()
+ assert_matches_type(OpenAIListResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None:
- async with async_client.model_providers.openai.keys.with_streaming_response.list() as response:
+ async with async_client.models.providers.openai.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyListResponse, key, path=["response"])
+ openai = await response.parse()
+ assert_matches_type(OpenAIListResponse, openai, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.openai.keys.delete(
+ openai = await async_client.models.providers.openai.delete(
"api_key_uuid",
)
- assert_matches_type(KeyDeleteResponse, key, path=["response"])
+ assert_matches_type(OpenAIDeleteResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.model_providers.openai.keys.with_raw_response.delete(
+ response = await async_client.models.providers.openai.with_raw_response.delete(
"api_key_uuid",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyDeleteResponse, key, path=["response"])
+ openai = await response.parse()
+ assert_matches_type(OpenAIDeleteResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None:
- async with async_client.model_providers.openai.keys.with_streaming_response.delete(
+ async with async_client.models.providers.openai.with_streaming_response.delete(
"api_key_uuid",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyDeleteResponse, key, path=["response"])
+ openai = await response.parse()
+ assert_matches_type(OpenAIDeleteResponse, openai, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -500,51 +500,51 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) ->
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
- await async_client.model_providers.openai.keys.with_raw_response.delete(
+ await async_client.models.providers.openai.with_raw_response.delete(
"",
)
@pytest.mark.skip()
@parametrize
async def test_method_retrieve_agents(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.openai.keys.retrieve_agents(
+ openai = await async_client.models.providers.openai.retrieve_agents(
uuid="uuid",
)
- assert_matches_type(KeyRetrieveAgentsResponse, key, path=["response"])
+ assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_retrieve_agents_with_all_params(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.model_providers.openai.keys.retrieve_agents(
+ openai = await async_client.models.providers.openai.retrieve_agents(
uuid="uuid",
page=0,
per_page=0,
)
- assert_matches_type(KeyRetrieveAgentsResponse, key, path=["response"])
+ assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_retrieve_agents(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.model_providers.openai.keys.with_raw_response.retrieve_agents(
+ response = await async_client.models.providers.openai.with_raw_response.retrieve_agents(
uuid="uuid",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyRetrieveAgentsResponse, key, path=["response"])
+ openai = await response.parse()
+ assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_retrieve_agents(self, async_client: AsyncGradientAI) -> None:
- async with async_client.model_providers.openai.keys.with_streaming_response.retrieve_agents(
+ async with async_client.models.providers.openai.with_streaming_response.retrieve_agents(
uuid="uuid",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- key = await response.parse()
- assert_matches_type(KeyRetrieveAgentsResponse, key, path=["response"])
+ openai = await response.parse()
+ assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -552,6 +552,6 @@ async def test_streaming_response_retrieve_agents(self, async_client: AsyncGradi
@parametrize
async def test_path_params_retrieve_agents(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
- await async_client.model_providers.openai.keys.with_raw_response.retrieve_agents(
+ await async_client.models.providers.openai.with_raw_response.retrieve_agents(
uuid="",
)
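The OpenAI provider tests follow the same mapping, down to the `create` and `retrieve_agents` parameter sets exercised above. A hedged async sketch of the renamed resource (key value and UUID are placeholders; credential resolution is assumed to match the sync client):

```python
# Sketch of the renamed async OpenAI provider resource; parameters mirror
# the tests above. Key value and UUID are placeholders, not real secrets.
import asyncio

from gradientai import AsyncGradientAI


async def main() -> None:
    client = AsyncGradientAI()  # assumes credentials come from the environment

    # Old path: client.model_providers.openai.keys.create(...)
    # New path: client.models.providers.openai.create(...)
    created = await client.models.providers.openai.create(
        api_key="sk-placeholder",
        name="ci-openai-key",
    )
    print(created)

    # List the agents attached to an existing provider key.
    agents = await client.models.providers.openai.retrieve_agents(
        uuid="replace-with-api-key-uuid",
        page=1,
        per_page=10,
    )
    print(agents)


asyncio.run(main())
```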