diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 88980c93..6bfd00b1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,12 +7,16 @@ on: - 'integrated/**' - 'stl-preview-head/**' - 'stl-preview-base/**' + pull_request: + branches-ignore: + - 'stl-preview-head/**' + - 'stl-preview-base/**' jobs: lint: timeout-minutes: 10 name: lint - runs-on: ${{ github.repository == 'stainless-sdks/digitalocean-genai-sdk-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} + runs-on: ${{ github.repository == 'stainless-sdks/gradientai-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} steps: - uses: actions/checkout@v4 @@ -31,7 +35,7 @@ jobs: run: ./scripts/lint upload: - if: github.repository == 'stainless-sdks/digitalocean-genai-sdk-python' + if: github.repository == 'stainless-sdks/gradientai-python' timeout-minutes: 10 name: upload permissions: @@ -57,7 +61,7 @@ jobs: test: timeout-minutes: 10 name: test - runs-on: ${{ github.repository == 'stainless-sdks/digitalocean-genai-sdk-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} + runs-on: ${{ github.repository == 'stainless-sdks/gradientai-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index 2bc5b4b2..3dcd6c42 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -1,6 +1,6 @@ # This workflow is triggered when a GitHub release is created. # It can also be run manually to re-publish to PyPI in case it failed for some reason. -# You can run this workflow by navigating to https://www.github.com/digitalocean/genai-python/actions/workflows/publish-pypi.yml +# You can run this workflow by navigating to https://www.github.com/digitalocean/gradientai-python/actions/workflows/publish-pypi.yml name: Publish PyPI on: workflow_dispatch: @@ -28,4 +28,4 @@ jobs: run: | bash ./bin/publish-pypi env: - PYPI_TOKEN: ${{ secrets.DIGITALOCEAN_GENAI_SDK_PYPI_TOKEN || secrets.PYPI_TOKEN }} + PYPI_TOKEN: ${{ secrets.GRADIENT_AI_PYPI_TOKEN || secrets.PYPI_TOKEN }} diff --git a/.github/workflows/release-doctor.yml b/.github/workflows/release-doctor.yml index 0f23cbc4..d49e26c2 100644 --- a/.github/workflows/release-doctor.yml +++ b/.github/workflows/release-doctor.yml @@ -9,7 +9,7 @@ jobs: release_doctor: name: release doctor runs-on: ubuntu-latest - if: github.repository == 'digitalocean/genai-python' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || startsWith(github.head_ref, 'release-please') || github.head_ref == 'next') + if: github.repository == 'digitalocean/gradientai-python' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || startsWith(github.head_ref, 'release-please') || github.head_ref == 'next') steps: - uses: actions/checkout@v4 @@ -18,4 +18,4 @@ jobs: run: | bash ./bin/check-release-environment env: - PYPI_TOKEN: ${{ secrets.DIGITALOCEAN_GENAI_SDK_PYPI_TOKEN || secrets.PYPI_TOKEN }} + PYPI_TOKEN: ${{ secrets.GRADIENT_AI_PYPI_TOKEN || secrets.PYPI_TOKEN }} diff --git a/.release-please-manifest.json b/.release-please-manifest.json index aaf968a1..b56c3d0b 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.1.0-alpha.3" + ".": "0.1.0-alpha.4" } \ No newline at end of file diff --git a/.stats.yml b/.stats.yml index eb8f1c2d..f0863f5f 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ -configured_endpoints: 60 -openapi_spec_url: 
https://storage.googleapis.com/stainless-sdk-openapi-specs/digitalocean%2Fdigitalocean-genai-sdk-17838dec38ee8475c4bf4695b8dc70fe42a8f4da8ae9ffd415dc895b6628a952.yml -openapi_spec_hash: cfe5453e150989c8a9dbc9d7b4d1f76a -config_hash: 565bf6264bdf2a317cc5e2f02d02a702 +configured_endpoints: 70 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/digitalocean%2Fgradientai-e40feaac59c85aace6aa42d2749b20e0955dbbae58b06c3a650bc03adafcd7b5.yml +openapi_spec_hash: 825c1a4816938e9f594b7a8c06692667 +config_hash: 211ece2994c6ac52f84f78ee56c1097a diff --git a/CHANGELOG.md b/CHANGELOG.md index 2f83e62e..be25824a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,63 @@ # Changelog +## 0.1.0-alpha.4 (2025-06-25) + +Full Changelog: [v0.1.0-alpha.3...v0.1.0-alpha.4](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.3...v0.1.0-alpha.4) + +### Features + +* **api:** update via SDK Studio ([d1ea884](https://github.com/digitalocean/gradientai-python/commit/d1ea884c9be72b3f8804c5ba91bf4f77a3284a6c)) +* **api:** update via SDK Studio ([584f9f1](https://github.com/digitalocean/gradientai-python/commit/584f9f1304b3612eb25f1438041d287592463438)) +* **api:** update via SDK Studio ([7aee6e5](https://github.com/digitalocean/gradientai-python/commit/7aee6e55a0574fc1b6ab73a1777c92e4f3a940ea)) +* **api:** update via SDK Studio ([4212f62](https://github.com/digitalocean/gradientai-python/commit/4212f62b19c44bcb12c02fe396e8c51dd89d3868)) +* **api:** update via SDK Studio ([b16cceb](https://github.com/digitalocean/gradientai-python/commit/b16cceb63edb4253084036b693834bde5da10943)) +* **api:** update via SDK Studio ([34382c0](https://github.com/digitalocean/gradientai-python/commit/34382c06c5d61ac97572cb4977d020e1ede9d4ff)) +* **api:** update via SDK Studio ([c33920a](https://github.com/digitalocean/gradientai-python/commit/c33920aba0dc1f3b8f4f890ce706c86fd452dd6b)) +* **api:** update via SDK Studio ([359c8d8](https://github.com/digitalocean/gradientai-python/commit/359c8d88cec1d60f0beb810b5a0139443d0a3348)) +* **api:** update via SDK Studio ([f27643e](https://github.com/digitalocean/gradientai-python/commit/f27643e1e00f606029be919a7117801facc6e5b7)) +* **api:** update via SDK Studio ([e59144c](https://github.com/digitalocean/gradientai-python/commit/e59144c2d474a4003fd28b8eded08814ffa8d2f3)) +* **api:** update via SDK Studio ([97e1768](https://github.com/digitalocean/gradientai-python/commit/97e17687a348b8ef218c23a06729b6edb1ac5ea9)) +* **api:** update via SDK Studio ([eac41f1](https://github.com/digitalocean/gradientai-python/commit/eac41f12912b8d32ffa23d225f4ca56fa5c72505)) +* **api:** update via SDK Studio ([1fa7ebb](https://github.com/digitalocean/gradientai-python/commit/1fa7ebb0080db9087b82d29e7197e44dfbb1ebed)) +* **api:** update via SDK Studio ([aa2610a](https://github.com/digitalocean/gradientai-python/commit/aa2610afe7da79429e05bff64b4796de7f525681)) +* **api:** update via SDK Studio ([e5c8d76](https://github.com/digitalocean/gradientai-python/commit/e5c8d768388b16c06fcc2abee71a53dcc8b3e8c5)) +* **api:** update via SDK Studio ([5f700dc](https://github.com/digitalocean/gradientai-python/commit/5f700dc7a4e757015d3bd6f2e82a311114b82d77)) +* **api:** update via SDK Studio ([c042496](https://github.com/digitalocean/gradientai-python/commit/c04249614917198b1eb2324438605d99b719a1cf)) +* **api:** update via SDK Studio ([5ebec81](https://github.com/digitalocean/gradientai-python/commit/5ebec81604a206eba5e75a7e8990bd7711ba8f47)) +* **api:** update via SDK Studio 
([cac54a8](https://github.com/digitalocean/gradientai-python/commit/cac54a81a3f22d34b2de0ebfac3c68a982178cad)) +* **api:** update via SDK Studio ([6d62ab0](https://github.com/digitalocean/gradientai-python/commit/6d62ab00594d70df0458a0a401f866af15a9298e)) +* **api:** update via SDK Studio ([0ccc62c](https://github.com/digitalocean/gradientai-python/commit/0ccc62cb8ef387e0aaf6784db25d5f99a587e5da)) +* **api:** update via SDK Studio ([e75adfb](https://github.com/digitalocean/gradientai-python/commit/e75adfbd2d035e57ae110a1d78ea40fb116975e5)) +* **api:** update via SDK Studio ([8bd264b](https://github.com/digitalocean/gradientai-python/commit/8bd264b4b4686ca078bf4eb4b5462f058406df3e)) +* **api:** update via SDK Studio ([6254ccf](https://github.com/digitalocean/gradientai-python/commit/6254ccf45cbe50ca8191c7149824964f5d00d82f)) +* **api:** update via SDK Studio ([8f5761b](https://github.com/digitalocean/gradientai-python/commit/8f5761b1d18fb48ad7488e6f0ad771c077eb7961)) +* **api:** update via SDK Studio ([f853616](https://github.com/digitalocean/gradientai-python/commit/f8536166320d1d5bacf1d10a5edb2f71691dde8b)) +* **client:** add support for aiohttp ([494afde](https://github.com/digitalocean/gradientai-python/commit/494afde754f735d1ba95011fc83d23d2410fcfdd)) + + +### Bug Fixes + +* **client:** correctly parse binary response | stream ([abba5be](https://github.com/digitalocean/gradientai-python/commit/abba5be958d03a7e5ce7d1cbf8069c0bcf52ee20)) +* **tests:** fix: tests which call HTTP endpoints directly with the example parameters ([e649dcb](https://github.com/digitalocean/gradientai-python/commit/e649dcb0f9416e9bf568cc9f3480d7e222052391)) + + +### Chores + +* **ci:** enable for pull requests ([b6b3f9e](https://github.com/digitalocean/gradientai-python/commit/b6b3f9ea85918cfc6fc7304b2d21c340d82a0083)) +* **internal:** codegen related update ([4126872](https://github.com/digitalocean/gradientai-python/commit/41268721eafd33fcca5688ca5dff7401f25bdeb2)) +* **internal:** codegen related update ([10b79fb](https://github.com/digitalocean/gradientai-python/commit/10b79fb1d51bcff6ed0d18e5ccd18fd1cd75af9f)) +* **internal:** update conftest.py ([12e2103](https://github.com/digitalocean/gradientai-python/commit/12e210389204ff74f504e1ec3aa5ba99f1b4971c)) +* **readme:** update badges ([6e40dc3](https://github.com/digitalocean/gradientai-python/commit/6e40dc3fa4e33082be7b0bbf65d07e9ae9ac6370)) +* **tests:** add tests for httpx client instantiation & proxies ([7ecf66c](https://github.com/digitalocean/gradientai-python/commit/7ecf66c58a124c153a32055967beacbd1a3bbcf3)) +* **tests:** run tests in parallel ([861dd6b](https://github.com/digitalocean/gradientai-python/commit/861dd6b75956f2c12814ad32b05624d8d8537d52)) +* **tests:** skip some failing tests on the latest python versions ([75b4539](https://github.com/digitalocean/gradientai-python/commit/75b45398c18e75be3389be20479f54521c2e474a)) +* update SDK settings ([ed595b0](https://github.com/digitalocean/gradientai-python/commit/ed595b0a23df125ffba733d7339e771997c3f149)) + + +### Documentation + +* **client:** fix httpx.Timeout documentation reference ([5d452d7](https://github.com/digitalocean/gradientai-python/commit/5d452d7245af6c80f47f8395f1c03493dfb53a52)) + ## 0.1.0-alpha.3 (2025-06-12) Full Changelog: [v0.1.0-alpha.2...v0.1.0-alpha.3](https://github.com/digitalocean/genai-python/compare/v0.1.0-alpha.2...v0.1.0-alpha.3) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7d5d60a7..086907ef 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -36,7 +36,7 @@ 
$ pip install -r requirements-dev.lock Most of the SDK is generated code. Modifications to code will be persisted between generations, but may result in merge conflicts between manual patches and changes from the generator. The generator will never -modify the contents of the `src/digitalocean_genai_sdk/lib/` and `examples/` directories. +modify the contents of the `src/gradientai/lib/` and `examples/` directories. ## Adding and running examples @@ -62,7 +62,7 @@ If you’d like to use the repository from source, you can either install from g To install via git: ```sh -$ pip install git+ssh://git@github.com/digitalocean/genai-python.git +$ pip install git+ssh://git@github.com/digitalocean/gradientai-python.git ``` Alternatively, you can build from source and install the wheel file: @@ -120,7 +120,7 @@ the changes aren't made through the automated pipeline, you may want to make rel ### Publish with a GitHub workflow -You can release to package managers by using [the `Publish PyPI` GitHub action](https://www.github.com/digitalocean/genai-python/actions/workflows/publish-pypi.yml). This requires a setup organization or repository secret to be set up. +You can release to package managers by using [the `Publish PyPI` GitHub action](https://www.github.com/digitalocean/gradientai-python/actions/workflows/publish-pypi.yml). This requires a setup organization or repository secret to be set up. ### Publish manually diff --git a/LICENSE b/LICENSE index 0c1fe1d5..974cb08a 100644 --- a/LICENSE +++ b/LICENSE @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2025 Digitalocean Genai SDK + Copyright 2025 Gradient AI Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/README.md b/README.md index 7b7f4731..09b1e15d 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,8 @@ -# Digitalocean Genai SDK Python API library +# Gradient AI Python API library -[![PyPI version](https://img.shields.io/pypi/v/c63a5cfe-b235-4fbe-8bbb-82a9e02a482a-python.svg)](https://pypi.org/project/c63a5cfe-b235-4fbe-8bbb-82a9e02a482a-python/) +[![PyPI version]()](https://pypi.org/project/c63a5cfe-b235-4fbe-8bbb-82a9e02a482a-python/) -The Digitalocean Genai SDK Python library provides convenient access to the Digitalocean Genai SDK REST API from any Python 3.8+ +The Gradient AI Python library provides convenient access to the Gradient AI REST API from any Python 3.8+ application. The library includes type definitions for all request params and response fields, and offers both synchronous and asynchronous clients powered by [httpx](https://github.com/encode/httpx). @@ -25,12 +25,10 @@ The full API of this library can be found in [api.md](api.md). 
```python import os -from digitalocean_genai_sdk import DigitaloceanGenaiSDK +from gradientai import GradientAI -client = DigitaloceanGenaiSDK( - api_key=os.environ.get( - "DIGITALOCEAN_GENAI_SDK_API_KEY" - ), # This is the default and can be omitted +client = GradientAI( + api_key=os.environ.get("GRADIENTAI_API_KEY"), # This is the default and can be omitted ) versions = client.agents.versions.list( @@ -41,22 +39,20 @@ print(versions.agent_versions) While you can provide an `api_key` keyword argument, we recommend using [python-dotenv](https://pypi.org/project/python-dotenv/) -to add `DIGITALOCEAN_GENAI_SDK_API_KEY="My API Key"` to your `.env` file +to add `GRADIENTAI_API_KEY="My API Key"` to your `.env` file so that your API Key is not stored in source control. ## Async usage -Simply import `AsyncDigitaloceanGenaiSDK` instead of `DigitaloceanGenaiSDK` and use `await` with each API call: +Simply import `AsyncGradientAI` instead of `GradientAI` and use `await` with each API call: ```python import os import asyncio -from digitalocean_genai_sdk import AsyncDigitaloceanGenaiSDK +from gradientai import AsyncGradientAI -client = AsyncDigitaloceanGenaiSDK( - api_key=os.environ.get( - "DIGITALOCEAN_GENAI_SDK_API_KEY" - ), # This is the default and can be omitted +client = AsyncGradientAI( + api_key=os.environ.get("GRADIENTAI_API_KEY"), # This is the default and can be omitted ) @@ -72,6 +68,40 @@ asyncio.run(main()) Functionality between the synchronous and asynchronous clients is otherwise identical. +### With aiohttp + +By default, the async client uses `httpx` for HTTP requests. However, for improved concurrency performance you may also use `aiohttp` as the HTTP backend. + +You can enable this by installing `aiohttp`: + +```sh +# install from PyPI +pip install --pre c63a5cfe-b235-4fbe-8bbb-82a9e02a482a-python[aiohttp] +``` + +Then you can enable it by instantiating the client with `http_client=DefaultAioHttpClient()`: + +```python +import os +import asyncio +from gradientai import DefaultAioHttpClient +from gradientai import AsyncGradientAI + + +async def main() -> None: + async with AsyncGradientAI( + api_key=os.environ.get("GRADIENTAI_API_KEY"), # This is the default and can be omitted + http_client=DefaultAioHttpClient(), + ) as client: + versions = await client.agents.versions.list( + uuid="REPLACE_ME", + ) + print(versions.agent_versions) + + +asyncio.run(main()) +``` + ## Using types Nested request parameters are [TypedDicts](https://docs.python.org/3/library/typing.html#typing.TypedDict). Responses are [Pydantic models](https://docs.pydantic.dev) which also provide helper methods for things like: @@ -86,42 +116,41 @@ Typed requests and responses provide autocomplete and documentation within your Nested parameters are dictionaries, typed using `TypedDict`, for example: ```python -from digitalocean_genai_sdk import DigitaloceanGenaiSDK +from gradientai import GradientAI -client = DigitaloceanGenaiSDK() +client = GradientAI() -data_source = client.knowledge_bases.data_sources.create( - path_knowledge_base_uuid="knowledge_base_uuid", - aws_data_source={}, +evaluation_test_case = client.regions.evaluation_test_cases.create( + star_metric={}, ) -print(data_source.aws_data_source) +print(evaluation_test_case.star_metric) ``` ## Handling errors -When the library is unable to connect to the API (for example, due to network connection problems or a timeout), a subclass of `digitalocean_genai_sdk.APIConnectionError` is raised. 
+When the library is unable to connect to the API (for example, due to network connection problems or a timeout), a subclass of `gradientai.APIConnectionError` is raised. When the API returns a non-success status code (that is, 4xx or 5xx -response), a subclass of `digitalocean_genai_sdk.APIStatusError` is raised, containing `status_code` and `response` properties. +response), a subclass of `gradientai.APIStatusError` is raised, containing `status_code` and `response` properties. -All errors inherit from `digitalocean_genai_sdk.APIError`. +All errors inherit from `gradientai.APIError`. ```python -import digitalocean_genai_sdk -from digitalocean_genai_sdk import DigitaloceanGenaiSDK +import gradientai +from gradientai import GradientAI -client = DigitaloceanGenaiSDK() +client = GradientAI() try: client.agents.versions.list( uuid="REPLACE_ME", ) -except digitalocean_genai_sdk.APIConnectionError as e: +except gradientai.APIConnectionError as e: print("The server could not be reached") print(e.__cause__) # an underlying Exception, likely raised within httpx. -except digitalocean_genai_sdk.RateLimitError as e: +except gradientai.RateLimitError as e: print("A 429 status code was received; we should back off a bit.") -except digitalocean_genai_sdk.APIStatusError as e: +except gradientai.APIStatusError as e: print("Another non-200-range status code was received") print(e.status_code) print(e.response) @@ -149,10 +178,10 @@ Connection errors (for example, due to a network connectivity problem), 408 Requ You can use the `max_retries` option to configure or disable retry settings: ```python -from digitalocean_genai_sdk import DigitaloceanGenaiSDK +from gradientai import GradientAI # Configure the default for all requests: -client = DigitaloceanGenaiSDK( +client = GradientAI( # default is 2 max_retries=0, ) @@ -166,19 +195,19 @@ client.with_options(max_retries=5).agents.versions.list( ### Timeouts By default requests time out after 1 minute. You can configure this with a `timeout` option, -which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/#fine-tuning-the-configuration) object: +which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/timeouts/#fine-tuning-the-configuration) object: ```python -from digitalocean_genai_sdk import DigitaloceanGenaiSDK +from gradientai import GradientAI # Configure the default for all requests: -client = DigitaloceanGenaiSDK( +client = GradientAI( # 20 seconds (default is 1 minute) timeout=20.0, ) # More granular control: -client = DigitaloceanGenaiSDK( +client = GradientAI( timeout=httpx.Timeout(60.0, read=5.0, write=10.0, connect=2.0), ) @@ -198,10 +227,10 @@ Note that requests that time out are [retried twice by default](#retries). We use the standard library [`logging`](https://docs.python.org/3/library/logging.html) module. -You can enable logging by setting the environment variable `DIGITALOCEAN_GENAI_SDK_LOG` to `info`. +You can enable logging by setting the environment variable `GRADIENT_AI_LOG` to `info`. ```shell -$ export DIGITALOCEAN_GENAI_SDK_LOG=info +$ export GRADIENT_AI_LOG=info ``` Or to `debug` for more verbose logging. 
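As an illustrative aside on the logging hunk above (editor's sketch, not part of this diff): the `GRADIENT_AI_LOG` variable documented there can also be set from Python rather than the shell, using only the standard-library `logging` module the README references. When the SDK actually reads the variable is an implementation detail, so it is set as early as possible here.

```python
import os
import logging

# Editor's illustrative sketch, not part of the generated README or this patch.
# `GRADIENT_AI_LOG` is the variable documented above; whether it is read at
# import time or at client construction is an implementation detail, so set it
# as early as possible (or use the shell `export` shown above instead).
os.environ["GRADIENT_AI_LOG"] = "debug"

# Route the SDK's standard-library log records somewhere visible.
logging.basicConfig(level=logging.DEBUG)
```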
@@ -223,9 +252,9 @@ if response.my_field is None: The "raw" Response object can be accessed by prefixing `.with_raw_response.` to any HTTP method call, e.g., ```py -from digitalocean_genai_sdk import DigitaloceanGenaiSDK +from gradientai import GradientAI -client = DigitaloceanGenaiSDK() +client = GradientAI() response = client.agents.versions.with_raw_response.list( uuid="REPLACE_ME", ) @@ -235,9 +264,9 @@ version = response.parse() # get the object that `agents.versions.list()` would print(version.agent_versions) ``` -These methods return an [`APIResponse`](https://github.com/digitalocean/genai-python/tree/main/src/digitalocean_genai_sdk/_response.py) object. +These methods return an [`APIResponse`](https://github.com/digitalocean/gradientai-python/tree/main/src/gradientai/_response.py) object. -The async client returns an [`AsyncAPIResponse`](https://github.com/digitalocean/genai-python/tree/main/src/digitalocean_genai_sdk/_response.py) with the same structure, the only difference being `await`able methods for reading the response content. +The async client returns an [`AsyncAPIResponse`](https://github.com/digitalocean/gradientai-python/tree/main/src/gradientai/_response.py) with the same structure, the only difference being `await`able methods for reading the response content. #### `.with_streaming_response` @@ -301,10 +330,10 @@ You can directly override the [httpx client](https://www.python-httpx.org/api/#c ```python import httpx -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, DefaultHttpxClient +from gradientai import GradientAI, DefaultHttpxClient -client = DigitaloceanGenaiSDK( - # Or use the `DIGITALOCEAN_GENAI_SDK_BASE_URL` env var +client = GradientAI( + # Or use the `GRADIENT_AI_BASE_URL` env var base_url="http://my.test.server.example.com:8083", http_client=DefaultHttpxClient( proxy="http://my.test.proxy.example.com", @@ -324,9 +353,9 @@ client.with_options(http_client=DefaultHttpxClient(...)) By default the library closes underlying HTTP connections whenever the client is [garbage collected](https://docs.python.org/3/reference/datamodel.html#object.__del__). You can manually close the client using the `.close()` method if desired, or with a context manager that closes when exiting. ```py -from digitalocean_genai_sdk import DigitaloceanGenaiSDK +from gradientai import GradientAI -with DigitaloceanGenaiSDK() as client: +with GradientAI() as client: # make requests here ... @@ -343,7 +372,7 @@ This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) con We take backwards-compatibility seriously and work hard to ensure you can rely on a smooth upgrade experience. -We are keen for your feedback; please open an [issue](https://www.github.com/digitalocean/genai-python/issues) with questions, bugs, or suggestions. +We are keen for your feedback; please open an [issue](https://www.github.com/digitalocean/gradientai-python/issues) with questions, bugs, or suggestions. ### Determining the installed version @@ -352,8 +381,8 @@ If you've upgraded to the latest version but aren't seeing any new features you You can determine the version that is being used at runtime with: ```py -import digitalocean_genai_sdk -print(digitalocean_genai_sdk.__version__) +import gradientai +print(gradientai.__version__) ``` ## Requirements diff --git a/SECURITY.md b/SECURITY.md index d08f7996..a7593759 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -16,7 +16,7 @@ before making any information public. 
## Reporting Non-SDK Related Security Issues If you encounter security issues that are not directly related to SDKs but pertain to the services -or products provided by Digitalocean Genai SDK, please follow the respective company's security reporting guidelines. +or products provided by Gradient AI, please follow the respective company's security reporting guidelines. --- diff --git a/api.md b/api.md index 32dbe7df..970f6951 100644 --- a/api.md +++ b/api.md @@ -3,14 +3,15 @@ Types: ```python -from digitalocean_genai_sdk.types import ( +from gradientai.types import ( APIAgent, APIAgentAPIKeyInfo, + APIAgentModel, APIAnthropicAPIKeyInfo, APIDeploymentVisibility, - APIModel, APIOpenAIAPIKeyInfo, APIRetrievalMethod, + APIWorkspace, AgentCreateResponse, AgentRetrieveResponse, AgentUpdateResponse, @@ -22,19 +23,19 @@ from digitalocean_genai_sdk.types import ( Methods: -- client.agents.create(\*\*params) -> AgentCreateResponse -- client.agents.retrieve(uuid) -> AgentRetrieveResponse -- client.agents.update(path_uuid, \*\*params) -> AgentUpdateResponse -- client.agents.list(\*\*params) -> AgentListResponse -- client.agents.delete(uuid) -> AgentDeleteResponse -- client.agents.update_status(path_uuid, \*\*params) -> AgentUpdateStatusResponse +- client.agents.create(\*\*params) -> AgentCreateResponse +- client.agents.retrieve(uuid) -> AgentRetrieveResponse +- client.agents.update(path_uuid, \*\*params) -> AgentUpdateResponse +- client.agents.list(\*\*params) -> AgentListResponse +- client.agents.delete(uuid) -> AgentDeleteResponse +- client.agents.update_status(path_uuid, \*\*params) -> AgentUpdateStatusResponse ## APIKeys Types: ```python -from digitalocean_genai_sdk.types.agents import ( +from gradientai.types.agents import ( APIKeyCreateResponse, APIKeyUpdateResponse, APIKeyListResponse, @@ -45,18 +46,18 @@ from digitalocean_genai_sdk.types.agents import ( Methods: -- client.agents.api_keys.create(path_agent_uuid, \*\*params) -> APIKeyCreateResponse -- client.agents.api_keys.update(path_api_key_uuid, \*, path_agent_uuid, \*\*params) -> APIKeyUpdateResponse -- client.agents.api_keys.list(agent_uuid, \*\*params) -> APIKeyListResponse -- client.agents.api_keys.delete(api_key_uuid, \*, agent_uuid) -> APIKeyDeleteResponse -- client.agents.api_keys.regenerate(api_key_uuid, \*, agent_uuid) -> APIKeyRegenerateResponse +- client.agents.api_keys.create(path_agent_uuid, \*\*params) -> APIKeyCreateResponse +- client.agents.api_keys.update(path_api_key_uuid, \*, path_agent_uuid, \*\*params) -> APIKeyUpdateResponse +- client.agents.api_keys.list(agent_uuid, \*\*params) -> APIKeyListResponse +- client.agents.api_keys.delete(api_key_uuid, \*, agent_uuid) -> APIKeyDeleteResponse +- client.agents.api_keys.regenerate(api_key_uuid, \*, agent_uuid) -> APIKeyRegenerateResponse ## Functions Types: ```python -from digitalocean_genai_sdk.types.agents import ( +from gradientai.types.agents import ( FunctionCreateResponse, FunctionUpdateResponse, FunctionDeleteResponse, @@ -65,51 +66,43 @@ from digitalocean_genai_sdk.types.agents import ( Methods: -- client.agents.functions.create(path_agent_uuid, \*\*params) -> FunctionCreateResponse -- client.agents.functions.update(path_function_uuid, \*, path_agent_uuid, \*\*params) -> FunctionUpdateResponse -- client.agents.functions.delete(function_uuid, \*, agent_uuid) -> FunctionDeleteResponse +- client.agents.functions.create(path_agent_uuid, \*\*params) -> FunctionCreateResponse +- client.agents.functions.update(path_function_uuid, \*, path_agent_uuid, \*\*params) -> 
FunctionUpdateResponse +- client.agents.functions.delete(function_uuid, \*, agent_uuid) -> FunctionDeleteResponse ## Versions Types: ```python -from digitalocean_genai_sdk.types.agents import ( - APILinks, - APIMeta, - VersionUpdateResponse, - VersionListResponse, -) +from gradientai.types.agents import APILinks, APIMeta, VersionUpdateResponse, VersionListResponse ``` Methods: -- client.agents.versions.update(path_uuid, \*\*params) -> VersionUpdateResponse -- client.agents.versions.list(uuid, \*\*params) -> VersionListResponse +- client.agents.versions.update(path_uuid, \*\*params) -> VersionUpdateResponse +- client.agents.versions.list(uuid, \*\*params) -> VersionListResponse ## KnowledgeBases Types: ```python -from digitalocean_genai_sdk.types.agents import ( - APILinkKnowledgeBaseOutput, - KnowledgeBaseDetachResponse, -) +from gradientai.types.agents import APILinkKnowledgeBaseOutput, KnowledgeBaseDetachResponse ``` Methods: -- client.agents.knowledge_bases.attach(agent_uuid) -> APILinkKnowledgeBaseOutput -- client.agents.knowledge_bases.attach_single(knowledge_base_uuid, \*, agent_uuid) -> APILinkKnowledgeBaseOutput -- client.agents.knowledge_bases.detach(knowledge_base_uuid, \*, agent_uuid) -> KnowledgeBaseDetachResponse +- client.agents.knowledge_bases.attach(agent_uuid) -> APILinkKnowledgeBaseOutput +- client.agents.knowledge_bases.attach_single(knowledge_base_uuid, \*, agent_uuid) -> APILinkKnowledgeBaseOutput +- client.agents.knowledge_bases.detach(knowledge_base_uuid, \*, agent_uuid) -> KnowledgeBaseDetachResponse ## ChildAgents Types: ```python -from digitalocean_genai_sdk.types.agents import ( +from gradientai.types.agents import ( ChildAgentUpdateResponse, ChildAgentDeleteResponse, ChildAgentAddResponse, @@ -119,10 +112,10 @@ from digitalocean_genai_sdk.types.agents import ( Methods: -- client.agents.child_agents.update(path_child_agent_uuid, \*, path_parent_agent_uuid, \*\*params) -> ChildAgentUpdateResponse -- client.agents.child_agents.delete(child_agent_uuid, \*, parent_agent_uuid) -> ChildAgentDeleteResponse -- client.agents.child_agents.add(path_child_agent_uuid, \*, path_parent_agent_uuid, \*\*params) -> ChildAgentAddResponse -- client.agents.child_agents.view(uuid) -> ChildAgentViewResponse +- client.agents.child_agents.update(path_child_agent_uuid, \*, path_parent_agent_uuid, \*\*params) -> ChildAgentUpdateResponse +- client.agents.child_agents.delete(child_agent_uuid, \*, parent_agent_uuid) -> ChildAgentDeleteResponse +- client.agents.child_agents.add(path_child_agent_uuid, \*, path_parent_agent_uuid, \*\*params) -> ChildAgentAddResponse +- client.agents.child_agents.view(uuid) -> ChildAgentViewResponse # Providers @@ -133,7 +126,7 @@ Methods: Types: ```python -from digitalocean_genai_sdk.types.providers.anthropic import ( +from gradientai.types.providers.anthropic import ( KeyCreateResponse, KeyRetrieveResponse, KeyUpdateResponse, @@ -145,12 +138,12 @@ from digitalocean_genai_sdk.types.providers.anthropic import ( Methods: -- client.providers.anthropic.keys.create(\*\*params) -> KeyCreateResponse -- client.providers.anthropic.keys.retrieve(api_key_uuid) -> KeyRetrieveResponse -- client.providers.anthropic.keys.update(path_api_key_uuid, \*\*params) -> KeyUpdateResponse -- client.providers.anthropic.keys.list(\*\*params) -> KeyListResponse -- client.providers.anthropic.keys.delete(api_key_uuid) -> KeyDeleteResponse -- client.providers.anthropic.keys.list_agents(uuid, \*\*params) -> KeyListAgentsResponse +- client.providers.anthropic.keys.create(\*\*params) -> 
KeyCreateResponse +- client.providers.anthropic.keys.retrieve(api_key_uuid) -> KeyRetrieveResponse +- client.providers.anthropic.keys.update(path_api_key_uuid, \*\*params) -> KeyUpdateResponse +- client.providers.anthropic.keys.list(\*\*params) -> KeyListResponse +- client.providers.anthropic.keys.delete(api_key_uuid) -> KeyDeleteResponse +- client.providers.anthropic.keys.list_agents(uuid, \*\*params) -> KeyListAgentsResponse ## OpenAI @@ -159,7 +152,7 @@ Methods: Types: ```python -from digitalocean_genai_sdk.types.providers.openai import ( +from gradientai.types.providers.openai import ( KeyCreateResponse, KeyRetrieveResponse, KeyUpdateResponse, @@ -171,47 +164,108 @@ from digitalocean_genai_sdk.types.providers.openai import ( Methods: -- client.providers.openai.keys.create(\*\*params) -> KeyCreateResponse -- client.providers.openai.keys.retrieve(api_key_uuid) -> KeyRetrieveResponse -- client.providers.openai.keys.update(path_api_key_uuid, \*\*params) -> KeyUpdateResponse -- client.providers.openai.keys.list(\*\*params) -> KeyListResponse -- client.providers.openai.keys.delete(api_key_uuid) -> KeyDeleteResponse -- client.providers.openai.keys.retrieve_agents(uuid, \*\*params) -> KeyRetrieveAgentsResponse +- client.providers.openai.keys.create(\*\*params) -> KeyCreateResponse +- client.providers.openai.keys.retrieve(api_key_uuid) -> KeyRetrieveResponse +- client.providers.openai.keys.update(path_api_key_uuid, \*\*params) -> KeyUpdateResponse +- client.providers.openai.keys.list(\*\*params) -> KeyListResponse +- client.providers.openai.keys.delete(api_key_uuid) -> KeyDeleteResponse +- client.providers.openai.keys.retrieve_agents(uuid, \*\*params) -> KeyRetrieveAgentsResponse + +# Regions + +Types: + +```python +from gradientai.types import ( + APIEvaluationMetric, + RegionListResponse, + RegionListEvaluationMetricsResponse, +) +``` -# Auth +Methods: -## Agents +- client.regions.list(\*\*params) -> RegionListResponse +- client.regions.list_evaluation_metrics() -> RegionListEvaluationMetricsResponse -### Token +## EvaluationRuns Types: ```python -from digitalocean_genai_sdk.types.auth.agents import TokenCreateResponse +from gradientai.types.regions import EvaluationRunCreateResponse, EvaluationRunRetrieveResponse ``` Methods: -- client.auth.agents.token.create(path_agent_uuid, \*\*params) -> TokenCreateResponse +- client.regions.evaluation_runs.create(\*\*params) -> EvaluationRunCreateResponse +- client.regions.evaluation_runs.retrieve(evaluation_run_uuid) -> EvaluationRunRetrieveResponse -# Regions +### Results Types: ```python -from digitalocean_genai_sdk.types import RegionListResponse +from gradientai.types.regions.evaluation_runs import ( + APIEvaluationMetricResult, + APIEvaluationRun, + APIPrompt, + ResultRetrieveResponse, + ResultRetrievePromptResponse, +) +``` + +Methods: + +- client.regions.evaluation_runs.results.retrieve(evaluation_run_uuid) -> ResultRetrieveResponse +- client.regions.evaluation_runs.results.retrieve_prompt(prompt_id, \*, evaluation_run_uuid) -> ResultRetrievePromptResponse + +## EvaluationTestCases + +Types: + +```python +from gradientai.types.regions import ( + APIEvaluationTestCase, + APIStarMetric, + EvaluationTestCaseCreateResponse, + EvaluationTestCaseRetrieveResponse, + EvaluationTestCaseUpdateResponse, + EvaluationTestCaseListResponse, + EvaluationTestCaseListEvaluationRunsResponse, +) ``` Methods: -- client.regions.list(\*\*params) -> RegionListResponse +- client.regions.evaluation_test_cases.create(\*\*params) -> EvaluationTestCaseCreateResponse +- 
client.regions.evaluation_test_cases.retrieve(test_case_uuid) -> EvaluationTestCaseRetrieveResponse +- client.regions.evaluation_test_cases.update(path_test_case_uuid, \*\*params) -> EvaluationTestCaseUpdateResponse +- client.regions.evaluation_test_cases.list() -> EvaluationTestCaseListResponse +- client.regions.evaluation_test_cases.list_evaluation_runs(evaluation_test_case_uuid, \*\*params) -> EvaluationTestCaseListEvaluationRunsResponse + +## EvaluationDatasets + +Types: + +```python +from gradientai.types.regions import ( + EvaluationDatasetCreateResponse, + EvaluationDatasetCreateFileUploadPresignedURLsResponse, +) +``` + +Methods: + +- client.regions.evaluation_datasets.create(\*\*params) -> EvaluationDatasetCreateResponse +- client.regions.evaluation_datasets.create_file_upload_presigned_urls(\*\*params) -> EvaluationDatasetCreateFileUploadPresignedURLsResponse # IndexingJobs Types: ```python -from digitalocean_genai_sdk.types import ( +from gradientai.types import ( APIIndexingJob, IndexingJobCreateResponse, IndexingJobRetrieveResponse, @@ -223,18 +277,18 @@ from digitalocean_genai_sdk.types import ( Methods: -- client.indexing_jobs.create(\*\*params) -> IndexingJobCreateResponse -- client.indexing_jobs.retrieve(uuid) -> IndexingJobRetrieveResponse -- client.indexing_jobs.list(\*\*params) -> IndexingJobListResponse -- client.indexing_jobs.retrieve_data_sources(indexing_job_uuid) -> IndexingJobRetrieveDataSourcesResponse -- client.indexing_jobs.update_cancel(path_uuid, \*\*params) -> IndexingJobUpdateCancelResponse +- client.indexing_jobs.create(\*\*params) -> IndexingJobCreateResponse +- client.indexing_jobs.retrieve(uuid) -> IndexingJobRetrieveResponse +- client.indexing_jobs.list(\*\*params) -> IndexingJobListResponse +- client.indexing_jobs.retrieve_data_sources(indexing_job_uuid) -> IndexingJobRetrieveDataSourcesResponse +- client.indexing_jobs.update_cancel(path_uuid, \*\*params) -> IndexingJobUpdateCancelResponse # KnowledgeBases Types: ```python -from digitalocean_genai_sdk.types import ( +from gradientai.types import ( APIKnowledgeBase, KnowledgeBaseCreateResponse, KnowledgeBaseRetrieveResponse, @@ -246,22 +300,23 @@ from digitalocean_genai_sdk.types import ( Methods: -- client.knowledge_bases.create(\*\*params) -> KnowledgeBaseCreateResponse -- client.knowledge_bases.retrieve(uuid) -> KnowledgeBaseRetrieveResponse -- client.knowledge_bases.update(path_uuid, \*\*params) -> KnowledgeBaseUpdateResponse -- client.knowledge_bases.list(\*\*params) -> KnowledgeBaseListResponse -- client.knowledge_bases.delete(uuid) -> KnowledgeBaseDeleteResponse +- client.knowledge_bases.create(\*\*params) -> KnowledgeBaseCreateResponse +- client.knowledge_bases.retrieve(uuid) -> KnowledgeBaseRetrieveResponse +- client.knowledge_bases.update(path_uuid, \*\*params) -> KnowledgeBaseUpdateResponse +- client.knowledge_bases.list(\*\*params) -> KnowledgeBaseListResponse +- client.knowledge_bases.delete(uuid) -> KnowledgeBaseDeleteResponse ## DataSources Types: ```python -from digitalocean_genai_sdk.types.knowledge_bases import ( +from gradientai.types.knowledge_bases import ( APIFileUploadDataSource, APIKnowledgeBaseDataSource, APISpacesDataSource, APIWebCrawlerDataSource, + AwsDataSource, DataSourceCreateResponse, DataSourceListResponse, DataSourceDeleteResponse, @@ -270,28 +325,32 @@ from digitalocean_genai_sdk.types.knowledge_bases import ( Methods: -- client.knowledge_bases.data_sources.create(path_knowledge_base_uuid, \*\*params) -> DataSourceCreateResponse -- 
client.knowledge_bases.data_sources.list(knowledge_base_uuid, \*\*params) -> DataSourceListResponse -- client.knowledge_bases.data_sources.delete(data_source_uuid, \*, knowledge_base_uuid) -> DataSourceDeleteResponse +- client.knowledge_bases.data_sources.create(path_knowledge_base_uuid, \*\*params) -> DataSourceCreateResponse +- client.knowledge_bases.data_sources.list(knowledge_base_uuid, \*\*params) -> DataSourceListResponse +- client.knowledge_bases.data_sources.delete(data_source_uuid, \*, knowledge_base_uuid) -> DataSourceDeleteResponse -# APIKeys +# Chat + +## Completions Types: ```python -from digitalocean_genai_sdk.types import APIAgreement, APIModelVersion, APIKeyListResponse +from gradientai.types.chat import ChatCompletionTokenLogprob, CompletionCreateResponse ``` Methods: -- client.api_keys.list(\*\*params) -> APIKeyListResponse +- client.chat.completions.create(\*\*params) -> CompletionCreateResponse + +# Inference ## APIKeys Types: ```python -from digitalocean_genai_sdk.types.api_keys import ( +from gradientai.types.inference import ( APIModelAPIKeyInfo, APIKeyCreateResponse, APIKeyUpdateResponse, @@ -303,49 +362,33 @@ from digitalocean_genai_sdk.types.api_keys import ( Methods: -- client.api*keys.api_keys.create(\*\*params) -> APIKeyCreateResponse -- client.api*keys.api_keys.update(path_api_key_uuid, \*\*params) -> APIKeyUpdateResponse -- client.api*keys.api_keys.list(\*\*params) -> APIKeyListResponse -- client.api*keys.api_keys.delete(api_key_uuid) -> APIKeyDeleteResponse -- client.api*keys.api_keys.update_regenerate(api_key_uuid) -> APIKeyUpdateRegenerateResponse - -# Chat - -Types: - -```python -from digitalocean_genai_sdk.types import ( - ChatCompletionRequestMessageContentPartText, - ChatCompletionTokenLogprob, - ChatCreateCompletionResponse, -) -``` - -Methods: - -- client.chat.create_completion(\*\*params) -> ChatCreateCompletionResponse +- client.inference.api_keys.create(\*\*params) -> APIKeyCreateResponse +- client.inference.api_keys.update(path_api_key_uuid, \*\*params) -> APIKeyUpdateResponse +- client.inference.api_keys.list(\*\*params) -> APIKeyListResponse +- client.inference.api_keys.delete(api_key_uuid) -> APIKeyDeleteResponse +- client.inference.api_keys.update_regenerate(api_key_uuid) -> APIKeyUpdateRegenerateResponse -# Embeddings +## Models Types: ```python -from digitalocean_genai_sdk.types import EmbeddingCreateResponse +from gradientai.types.inference import Model, ModelListResponse ``` Methods: -- client.embeddings.create(\*\*params) -> EmbeddingCreateResponse +- client.inference.models.retrieve(model) -> Model +- client.inference.models.list() -> ModelListResponse # Models Types: ```python -from digitalocean_genai_sdk.types import Model, ModelListResponse +from gradientai.types import APIAgreement, APIModel, APIModelVersion, ModelListResponse ``` Methods: -- client.models.retrieve(model) -> Model -- client.models.list() -> ModelListResponse +- client.models.list(\*\*params) -> ModelListResponse diff --git a/bin/check-release-environment b/bin/check-release-environment index 9e89a88a..b1bd8969 100644 --- a/bin/check-release-environment +++ b/bin/check-release-environment @@ -3,7 +3,7 @@ errors=() if [ -z "${PYPI_TOKEN}" ]; then - errors+=("The DIGITALOCEAN_GENAI_SDK_PYPI_TOKEN secret has not been set. Please set it in either this repository's secrets or your organization secrets.") + errors+=("The GRADIENT_AI_PYPI_TOKEN secret has not been set. 
Please set it in either this repository's secrets or your organization secrets.") fi lenErrors=${#errors[@]} diff --git a/mypy.ini b/mypy.ini index 54f4282a..748d8234 100644 --- a/mypy.ini +++ b/mypy.ini @@ -8,7 +8,7 @@ show_error_codes = True # # We also exclude our `tests` as mypy doesn't always infer # types correctly and Pyright will still catch any type errors. -exclude = ^(src/digitalocean_genai_sdk/_files\.py|_dev/.*\.py|tests/.*)$ +exclude = ^(src/gradientai/_files\.py|_dev/.*\.py|tests/.*)$ strict_equality = True implicit_reexport = True diff --git a/pyproject.toml b/pyproject.toml index 9dd31517..0dd5228b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,11 +1,11 @@ [project] name = "c63a5cfe-b235-4fbe-8bbb-82a9e02a482a-python" -version = "0.1.0-alpha.3" -description = "The official Python library for the digitalocean-genai-sdk API" +version = "0.1.0-alpha.4" +description = "The official Python library for the GradientAI API" dynamic = ["readme"] license = "Apache-2.0" authors = [ -{ name = "Digitalocean Genai SDK", email = "" }, +{ name = "Gradient AI", email = "" }, ] dependencies = [ "httpx>=0.23.0, <1", @@ -34,9 +34,11 @@ classifiers = [ ] [project.urls] -Homepage = "https://github.com/digitalocean/genai-python" -Repository = "https://github.com/digitalocean/genai-python" +Homepage = "https://github.com/digitalocean/gradientai-python" +Repository = "https://github.com/digitalocean/gradientai-python" +[project.optional-dependencies] +aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.6"] [tool.rye] managed = true @@ -54,6 +56,7 @@ dev-dependencies = [ "importlib-metadata>=6.7.0", "rich>=13.7.1", "nest_asyncio==1.6.0", + "pytest-xdist>=3.6.1", ] [tool.rye.scripts] @@ -75,14 +78,14 @@ format = { chain = [ "check:ruff" = "ruff check ." "fix:ruff" = "ruff check --fix ." -"check:importable" = "python -c 'import digitalocean_genai_sdk'" +"check:importable" = "python -c 'import gradientai'" typecheck = { chain = [ "typecheck:pyright", "typecheck:mypy" ]} "typecheck:pyright" = "pyright" -"typecheck:verify-types" = "pyright --verifytypes digitalocean_genai_sdk --ignoreexternal" +"typecheck:verify-types" = "pyright --verifytypes gradientai --ignoreexternal" "typecheck:mypy" = "mypy ." 
[build-system] @@ -95,7 +98,7 @@ include = [ ] [tool.hatch.build.targets.wheel] -packages = ["src/digitalocean_genai_sdk"] +packages = ["src/gradientai"] [tool.hatch.build.targets.sdist] # Basically everything except hidden files/directories (such as .github, .devcontainers, .python-version, etc) @@ -121,11 +124,11 @@ path = "README.md" [[tool.hatch.metadata.hooks.fancy-pypi-readme.substitutions]] # replace relative links with absolute links pattern = '\[(.+?)\]\(((?!https?://)\S+?)\)' -replacement = '[\1](https://github.com/digitalocean/genai-python/tree/main/\g<2>)' +replacement = '[\1](https://github.com/digitalocean/gradientai-python/tree/main/\g<2>)' [tool.pytest.ini_options] testpaths = ["tests"] -addopts = "--tb=short" +addopts = "--tb=short -n auto" xfail_strict = true asyncio_mode = "auto" asyncio_default_fixture_loop_scope = "session" @@ -198,7 +201,7 @@ length-sort = true length-sort-straight = true combine-as-imports = true extra-standard-library = ["typing_extensions"] -known-first-party = ["digitalocean_genai_sdk", "tests"] +known-first-party = ["gradientai", "tests"] [tool.ruff.lint.per-file-ignores] "bin/**.py" = ["T201", "T203"] diff --git a/release-please-config.json b/release-please-config.json index 234b9475..2ff9a58c 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -61,6 +61,6 @@ ], "release-type": "python", "extra-files": [ - "src/digitalocean_genai_sdk/_version.py" + "src/gradientai/_version.py" ] } \ No newline at end of file diff --git a/requirements-dev.lock b/requirements-dev.lock index f784e9a3..85b6a829 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -10,6 +10,13 @@ # universal: false -e file:. +aiohappyeyeballs==2.6.1 + # via aiohttp +aiohttp==3.12.8 + # via c63a5cfe-b235-4fbe-8bbb-82a9e02a482a-python + # via httpx-aiohttp +aiosignal==1.3.2 + # via aiohttp annotated-types==0.6.0 # via pydantic anyio==4.4.0 @@ -17,6 +24,10 @@ anyio==4.4.0 # via httpx argcomplete==3.1.2 # via nox +async-timeout==5.0.1 + # via aiohttp +attrs==25.3.0 + # via aiohttp certifi==2023.7.22 # via httpcore # via httpx @@ -30,18 +41,27 @@ distro==1.8.0 exceptiongroup==1.2.2 # via anyio # via pytest +execnet==2.1.1 + # via pytest-xdist filelock==3.12.4 # via virtualenv +frozenlist==1.6.2 + # via aiohttp + # via aiosignal h11==0.14.0 # via httpcore httpcore==1.0.2 # via httpx httpx==0.28.1 # via c63a5cfe-b235-4fbe-8bbb-82a9e02a482a-python + # via httpx-aiohttp # via respx +httpx-aiohttp==0.1.6 + # via c63a5cfe-b235-4fbe-8bbb-82a9e02a482a-python idna==3.4 # via anyio # via httpx + # via yarl importlib-metadata==7.0.0 iniconfig==2.0.0 # via pytest @@ -49,6 +69,9 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py +multidict==6.4.4 + # via aiohttp + # via yarl mypy==1.14.1 mypy-extensions==1.0.0 # via mypy @@ -63,6 +86,9 @@ platformdirs==3.11.0 # via virtualenv pluggy==1.5.0 # via pytest +propcache==0.3.1 + # via aiohttp + # via yarl pydantic==2.10.3 # via c63a5cfe-b235-4fbe-8bbb-82a9e02a482a-python pydantic-core==2.27.1 @@ -72,7 +98,9 @@ pygments==2.18.0 pyright==1.1.399 pytest==8.3.3 # via pytest-asyncio + # via pytest-xdist pytest-asyncio==0.24.0 +pytest-xdist==3.7.0 python-dateutil==2.8.2 # via time-machine pytz==2023.3.post1 @@ -94,11 +122,14 @@ tomli==2.0.2 typing-extensions==4.12.2 # via anyio # via c63a5cfe-b235-4fbe-8bbb-82a9e02a482a-python + # via multidict # via mypy # via pydantic # via pydantic-core # via pyright virtualenv==20.24.5 # via nox +yarl==1.20.0 + # via aiohttp zipp==3.17.0 # via importlib-metadata diff 
--git a/requirements.lock b/requirements.lock index dab2f6ce..47944bd5 100644 --- a/requirements.lock +++ b/requirements.lock @@ -10,11 +10,22 @@ # universal: false -e file:. +aiohappyeyeballs==2.6.1 + # via aiohttp +aiohttp==3.12.8 + # via c63a5cfe-b235-4fbe-8bbb-82a9e02a482a-python + # via httpx-aiohttp +aiosignal==1.3.2 + # via aiohttp annotated-types==0.6.0 # via pydantic anyio==4.4.0 # via c63a5cfe-b235-4fbe-8bbb-82a9e02a482a-python # via httpx +async-timeout==5.0.1 + # via aiohttp +attrs==25.3.0 + # via aiohttp certifi==2023.7.22 # via httpcore # via httpx @@ -22,15 +33,28 @@ distro==1.8.0 # via c63a5cfe-b235-4fbe-8bbb-82a9e02a482a-python exceptiongroup==1.2.2 # via anyio +frozenlist==1.6.2 + # via aiohttp + # via aiosignal h11==0.14.0 # via httpcore httpcore==1.0.2 # via httpx httpx==0.28.1 # via c63a5cfe-b235-4fbe-8bbb-82a9e02a482a-python + # via httpx-aiohttp +httpx-aiohttp==0.1.6 + # via c63a5cfe-b235-4fbe-8bbb-82a9e02a482a-python idna==3.4 # via anyio # via httpx + # via yarl +multidict==6.4.4 + # via aiohttp + # via yarl +propcache==0.3.1 + # via aiohttp + # via yarl pydantic==2.10.3 # via c63a5cfe-b235-4fbe-8bbb-82a9e02a482a-python pydantic-core==2.27.1 @@ -41,5 +65,8 @@ sniffio==1.3.0 typing-extensions==4.12.2 # via anyio # via c63a5cfe-b235-4fbe-8bbb-82a9e02a482a-python + # via multidict # via pydantic # via pydantic-core +yarl==1.20.0 + # via aiohttp diff --git a/scripts/lint b/scripts/lint index 3f725f2d..37b38f6f 100755 --- a/scripts/lint +++ b/scripts/lint @@ -8,4 +8,4 @@ echo "==> Running lints" rye run lint echo "==> Making sure it imports" -rye run python -c 'import digitalocean_genai_sdk' +rye run python -c 'import gradientai' diff --git a/scripts/utils/upload-artifact.sh b/scripts/utils/upload-artifact.sh index c1019559..eb717c71 100755 --- a/scripts/utils/upload-artifact.sh +++ b/scripts/utils/upload-artifact.sh @@ -18,7 +18,7 @@ UPLOAD_RESPONSE=$(tar -cz . | curl -v -X PUT \ if echo "$UPLOAD_RESPONSE" | grep -q "HTTP/[0-9.]* 200"; then echo -e "\033[32mUploaded build to Stainless storage.\033[0m" - echo -e "\033[32mInstallation: pip install --pre 'https://pkg.stainless.com/s/digitalocean-genai-sdk-python/$SHA'\033[0m" + echo -e "\033[32mInstallation: pip install --pre 'https://pkg.stainless.com/s/gradientai-python/$SHA'\033[0m" else echo -e "\033[31mFailed to upload artifact.\033[0m" exit 1 diff --git a/src/digitalocean_genai_sdk/_utils/_resources_proxy.py b/src/digitalocean_genai_sdk/_utils/_resources_proxy.py deleted file mode 100644 index 4ebaf7a4..00000000 --- a/src/digitalocean_genai_sdk/_utils/_resources_proxy.py +++ /dev/null @@ -1,24 +0,0 @@ -from __future__ import annotations - -from typing import Any -from typing_extensions import override - -from ._proxy import LazyProxy - - -class ResourcesProxy(LazyProxy[Any]): - """A proxy for the `digitalocean_genai_sdk.resources` module. - - This is used so that we can lazily import `digitalocean_genai_sdk.resources` only when - needed *and* so that users can just import `digitalocean_genai_sdk` and reference `digitalocean_genai_sdk.resources` - """ - - @override - def __load__(self) -> Any: - import importlib - - mod = importlib.import_module("digitalocean_genai_sdk.resources") - return mod - - -resources = ResourcesProxy().__as_proxied__() diff --git a/src/digitalocean_genai_sdk/_version.py b/src/digitalocean_genai_sdk/_version.py deleted file mode 100644 index 50483bc2..00000000 --- a/src/digitalocean_genai_sdk/_version.py +++ /dev/null @@ -1,4 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. 
See CONTRIBUTING.md for details. - -__title__ = "digitalocean_genai_sdk" -__version__ = "0.1.0-alpha.3" # x-release-please-version diff --git a/src/digitalocean_genai_sdk/resources/api_keys/__init__.py b/src/digitalocean_genai_sdk/resources/api_keys/__init__.py deleted file mode 100644 index ed14565c..00000000 --- a/src/digitalocean_genai_sdk/resources/api_keys/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from .api_keys import ( - APIKeysResource, - AsyncAPIKeysResource, - APIKeysResourceWithRawResponse, - AsyncAPIKeysResourceWithRawResponse, - APIKeysResourceWithStreamingResponse, - AsyncAPIKeysResourceWithStreamingResponse, -) - -__all__ = [ - "APIKeysResource", - "AsyncAPIKeysResource", - "APIKeysResourceWithRawResponse", - "AsyncAPIKeysResourceWithRawResponse", - "APIKeysResourceWithStreamingResponse", - "AsyncAPIKeysResourceWithStreamingResponse", -] diff --git a/src/digitalocean_genai_sdk/resources/auth/__init__.py b/src/digitalocean_genai_sdk/resources/auth/__init__.py deleted file mode 100644 index 7c844a98..00000000 --- a/src/digitalocean_genai_sdk/resources/auth/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from .auth import ( - AuthResource, - AsyncAuthResource, - AuthResourceWithRawResponse, - AsyncAuthResourceWithRawResponse, - AuthResourceWithStreamingResponse, - AsyncAuthResourceWithStreamingResponse, -) -from .agents import ( - AgentsResource, - AsyncAgentsResource, - AgentsResourceWithRawResponse, - AsyncAgentsResourceWithRawResponse, - AgentsResourceWithStreamingResponse, - AsyncAgentsResourceWithStreamingResponse, -) - -__all__ = [ - "AgentsResource", - "AsyncAgentsResource", - "AgentsResourceWithRawResponse", - "AsyncAgentsResourceWithRawResponse", - "AgentsResourceWithStreamingResponse", - "AsyncAgentsResourceWithStreamingResponse", - "AuthResource", - "AsyncAuthResource", - "AuthResourceWithRawResponse", - "AsyncAuthResourceWithRawResponse", - "AuthResourceWithStreamingResponse", - "AsyncAuthResourceWithStreamingResponse", -] diff --git a/src/digitalocean_genai_sdk/resources/auth/agents/__init__.py b/src/digitalocean_genai_sdk/resources/auth/agents/__init__.py deleted file mode 100644 index 2972198f..00000000 --- a/src/digitalocean_genai_sdk/resources/auth/agents/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -from .token import ( - TokenResource, - AsyncTokenResource, - TokenResourceWithRawResponse, - AsyncTokenResourceWithRawResponse, - TokenResourceWithStreamingResponse, - AsyncTokenResourceWithStreamingResponse, -) -from .agents import ( - AgentsResource, - AsyncAgentsResource, - AgentsResourceWithRawResponse, - AsyncAgentsResourceWithRawResponse, - AgentsResourceWithStreamingResponse, - AsyncAgentsResourceWithStreamingResponse, -) - -__all__ = [ - "TokenResource", - "AsyncTokenResource", - "TokenResourceWithRawResponse", - "AsyncTokenResourceWithRawResponse", - "TokenResourceWithStreamingResponse", - "AsyncTokenResourceWithStreamingResponse", - "AgentsResource", - "AsyncAgentsResource", - "AgentsResourceWithRawResponse", - "AsyncAgentsResourceWithRawResponse", - "AgentsResourceWithStreamingResponse", - "AsyncAgentsResourceWithStreamingResponse", -] diff --git a/src/digitalocean_genai_sdk/resources/auth/agents/agents.py b/src/digitalocean_genai_sdk/resources/auth/agents/agents.py deleted file mode 100644 index a0aa9faf..00000000 --- a/src/digitalocean_genai_sdk/resources/auth/agents/agents.py +++ /dev/null @@ -1,102 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from .token import ( - TokenResource, - AsyncTokenResource, - TokenResourceWithRawResponse, - AsyncTokenResourceWithRawResponse, - TokenResourceWithStreamingResponse, - AsyncTokenResourceWithStreamingResponse, -) -from ...._compat import cached_property -from ...._resource import SyncAPIResource, AsyncAPIResource - -__all__ = ["AgentsResource", "AsyncAgentsResource"] - - -class AgentsResource(SyncAPIResource): - @cached_property - def token(self) -> TokenResource: - return TokenResource(self._client) - - @cached_property - def with_raw_response(self) -> AgentsResourceWithRawResponse: - """ - This property can be used as a prefix for any HTTP method call to return - the raw response object instead of the parsed content. - - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers - """ - return AgentsResourceWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> AgentsResourceWithStreamingResponse: - """ - An alternative to `.with_raw_response` that doesn't eagerly read the response body. - - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response - """ - return AgentsResourceWithStreamingResponse(self) - - -class AsyncAgentsResource(AsyncAPIResource): - @cached_property - def token(self) -> AsyncTokenResource: - return AsyncTokenResource(self._client) - - @cached_property - def with_raw_response(self) -> AsyncAgentsResourceWithRawResponse: - """ - This property can be used as a prefix for any HTTP method call to return - the raw response object instead of the parsed content. - - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers - """ - return AsyncAgentsResourceWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> AsyncAgentsResourceWithStreamingResponse: - """ - An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response - """ - return AsyncAgentsResourceWithStreamingResponse(self) - - -class AgentsResourceWithRawResponse: - def __init__(self, agents: AgentsResource) -> None: - self._agents = agents - - @cached_property - def token(self) -> TokenResourceWithRawResponse: - return TokenResourceWithRawResponse(self._agents.token) - - -class AsyncAgentsResourceWithRawResponse: - def __init__(self, agents: AsyncAgentsResource) -> None: - self._agents = agents - - @cached_property - def token(self) -> AsyncTokenResourceWithRawResponse: - return AsyncTokenResourceWithRawResponse(self._agents.token) - - -class AgentsResourceWithStreamingResponse: - def __init__(self, agents: AgentsResource) -> None: - self._agents = agents - - @cached_property - def token(self) -> TokenResourceWithStreamingResponse: - return TokenResourceWithStreamingResponse(self._agents.token) - - -class AsyncAgentsResourceWithStreamingResponse: - def __init__(self, agents: AsyncAgentsResource) -> None: - self._agents = agents - - @cached_property - def token(self) -> AsyncTokenResourceWithStreamingResponse: - return AsyncTokenResourceWithStreamingResponse(self._agents.token) diff --git a/src/digitalocean_genai_sdk/resources/auth/agents/token.py b/src/digitalocean_genai_sdk/resources/auth/agents/token.py deleted file mode 100644 index 73ecef05..00000000 --- a/src/digitalocean_genai_sdk/resources/auth/agents/token.py +++ /dev/null @@ -1,173 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -import httpx - -from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven -from ...._utils import maybe_transform, async_maybe_transform -from ...._compat import cached_property -from ...._resource import SyncAPIResource, AsyncAPIResource -from ...._response import ( - to_raw_response_wrapper, - to_streamed_response_wrapper, - async_to_raw_response_wrapper, - async_to_streamed_response_wrapper, -) -from ...._base_client import make_request_options -from ....types.auth.agents import token_create_params -from ....types.auth.agents.token_create_response import TokenCreateResponse - -__all__ = ["TokenResource", "AsyncTokenResource"] - - -class TokenResource(SyncAPIResource): - @cached_property - def with_raw_response(self) -> TokenResourceWithRawResponse: - """ - This property can be used as a prefix for any HTTP method call to return - the raw response object instead of the parsed content. - - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers - """ - return TokenResourceWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> TokenResourceWithStreamingResponse: - """ - An alternative to `.with_raw_response` that doesn't eagerly read the response body. - - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response - """ - return TokenResourceWithStreamingResponse(self) - - def create( - self, - path_agent_uuid: str, - *, - body_agent_uuid: str | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. 
- extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> TokenCreateResponse: - """ - To issue an agent token, send a POST request to - `/v2/gen-ai/auth/agents/{agent_uuid}/token`. - - Args: - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not path_agent_uuid: - raise ValueError(f"Expected a non-empty value for `path_agent_uuid` but received {path_agent_uuid!r}") - return self._post( - f"/v2/genai/auth/agents/{path_agent_uuid}/token", - body=maybe_transform({"body_agent_uuid": body_agent_uuid}, token_create_params.TokenCreateParams), - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=TokenCreateResponse, - ) - - -class AsyncTokenResource(AsyncAPIResource): - @cached_property - def with_raw_response(self) -> AsyncTokenResourceWithRawResponse: - """ - This property can be used as a prefix for any HTTP method call to return - the raw response object instead of the parsed content. - - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers - """ - return AsyncTokenResourceWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> AsyncTokenResourceWithStreamingResponse: - """ - An alternative to `.with_raw_response` that doesn't eagerly read the response body. - - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response - """ - return AsyncTokenResourceWithStreamingResponse(self) - - async def create( - self, - path_agent_uuid: str, - *, - body_agent_uuid: str | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> TokenCreateResponse: - """ - To issue an agent token, send a POST request to - `/v2/gen-ai/auth/agents/{agent_uuid}/token`. 
- - Args: - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not path_agent_uuid: - raise ValueError(f"Expected a non-empty value for `path_agent_uuid` but received {path_agent_uuid!r}") - return await self._post( - f"/v2/genai/auth/agents/{path_agent_uuid}/token", - body=await async_maybe_transform( - {"body_agent_uuid": body_agent_uuid}, token_create_params.TokenCreateParams - ), - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=TokenCreateResponse, - ) - - -class TokenResourceWithRawResponse: - def __init__(self, token: TokenResource) -> None: - self._token = token - - self.create = to_raw_response_wrapper( - token.create, - ) - - -class AsyncTokenResourceWithRawResponse: - def __init__(self, token: AsyncTokenResource) -> None: - self._token = token - - self.create = async_to_raw_response_wrapper( - token.create, - ) - - -class TokenResourceWithStreamingResponse: - def __init__(self, token: TokenResource) -> None: - self._token = token - - self.create = to_streamed_response_wrapper( - token.create, - ) - - -class AsyncTokenResourceWithStreamingResponse: - def __init__(self, token: AsyncTokenResource) -> None: - self._token = token - - self.create = async_to_streamed_response_wrapper( - token.create, - ) diff --git a/src/digitalocean_genai_sdk/resources/auth/auth.py b/src/digitalocean_genai_sdk/resources/auth/auth.py deleted file mode 100644 index 985fc56c..00000000 --- a/src/digitalocean_genai_sdk/resources/auth/auth.py +++ /dev/null @@ -1,102 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from ..._compat import cached_property -from ..._resource import SyncAPIResource, AsyncAPIResource -from .agents.agents import ( - AgentsResource, - AsyncAgentsResource, - AgentsResourceWithRawResponse, - AsyncAgentsResourceWithRawResponse, - AgentsResourceWithStreamingResponse, - AsyncAgentsResourceWithStreamingResponse, -) - -__all__ = ["AuthResource", "AsyncAuthResource"] - - -class AuthResource(SyncAPIResource): - @cached_property - def agents(self) -> AgentsResource: - return AgentsResource(self._client) - - @cached_property - def with_raw_response(self) -> AuthResourceWithRawResponse: - """ - This property can be used as a prefix for any HTTP method call to return - the raw response object instead of the parsed content. - - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers - """ - return AuthResourceWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> AuthResourceWithStreamingResponse: - """ - An alternative to `.with_raw_response` that doesn't eagerly read the response body. - - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response - """ - return AuthResourceWithStreamingResponse(self) - - -class AsyncAuthResource(AsyncAPIResource): - @cached_property - def agents(self) -> AsyncAgentsResource: - return AsyncAgentsResource(self._client) - - @cached_property - def with_raw_response(self) -> AsyncAuthResourceWithRawResponse: - """ - This property can be used as a prefix for any HTTP method call to return - the raw response object instead of the parsed content. 
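
The token resource deleted above documents the agent-token endpoint (`POST /v2/gen-ai/auth/agents/{agent_uuid}/token`, returning `access_token` and `refresh_token` per the removed `TokenCreateResponse`). For reference, a rough sketch of the equivalent raw call with httpx; the bearer-token header is an assumption, and note the generated client posted to `/v2/genai/...` while its docstring uses the `/v2/gen-ai/...` form shown here.

```python
# Hedged sketch of the endpoint documented in the removed TokenResource.create;
# not part of the SDK. The Authorization header is an assumption.
import os

import httpx

agent_uuid = "00000000-0000-0000-0000-000000000000"  # hypothetical agent UUID
resp = httpx.post(
    f"https://api.digitalocean.com/v2/gen-ai/auth/agents/{agent_uuid}/token",
    headers={"Authorization": f"Bearer {os.environ['GRADIENTAI_API_KEY']}"},
    json={"agent_uuid": agent_uuid},  # wire key per the alias in the removed TokenCreateParams
)
resp.raise_for_status()
data = resp.json()  # expected keys per TokenCreateResponse: access_token, refresh_token
```
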
- - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers - """ - return AsyncAuthResourceWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> AsyncAuthResourceWithStreamingResponse: - """ - An alternative to `.with_raw_response` that doesn't eagerly read the response body. - - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response - """ - return AsyncAuthResourceWithStreamingResponse(self) - - -class AuthResourceWithRawResponse: - def __init__(self, auth: AuthResource) -> None: - self._auth = auth - - @cached_property - def agents(self) -> AgentsResourceWithRawResponse: - return AgentsResourceWithRawResponse(self._auth.agents) - - -class AsyncAuthResourceWithRawResponse: - def __init__(self, auth: AsyncAuthResource) -> None: - self._auth = auth - - @cached_property - def agents(self) -> AsyncAgentsResourceWithRawResponse: - return AsyncAgentsResourceWithRawResponse(self._auth.agents) - - -class AuthResourceWithStreamingResponse: - def __init__(self, auth: AuthResource) -> None: - self._auth = auth - - @cached_property - def agents(self) -> AgentsResourceWithStreamingResponse: - return AgentsResourceWithStreamingResponse(self._auth.agents) - - -class AsyncAuthResourceWithStreamingResponse: - def __init__(self, auth: AsyncAuthResource) -> None: - self._auth = auth - - @cached_property - def agents(self) -> AsyncAgentsResourceWithStreamingResponse: - return AsyncAgentsResourceWithStreamingResponse(self._auth.agents) diff --git a/src/digitalocean_genai_sdk/resources/embeddings.py b/src/digitalocean_genai_sdk/resources/embeddings.py deleted file mode 100644 index 1bcd3145..00000000 --- a/src/digitalocean_genai_sdk/resources/embeddings.py +++ /dev/null @@ -1,201 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import List, Union - -import httpx - -from ..types import embedding_create_params -from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven -from .._utils import maybe_transform, async_maybe_transform -from .._compat import cached_property -from .._resource import SyncAPIResource, AsyncAPIResource -from .._response import ( - to_raw_response_wrapper, - to_streamed_response_wrapper, - async_to_raw_response_wrapper, - async_to_streamed_response_wrapper, -) -from .._base_client import make_request_options -from ..types.embedding_create_response import EmbeddingCreateResponse - -__all__ = ["EmbeddingsResource", "AsyncEmbeddingsResource"] - - -class EmbeddingsResource(SyncAPIResource): - @cached_property - def with_raw_response(self) -> EmbeddingsResourceWithRawResponse: - """ - This property can be used as a prefix for any HTTP method call to return - the raw response object instead of the parsed content. - - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers - """ - return EmbeddingsResourceWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> EmbeddingsResourceWithStreamingResponse: - """ - An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response - """ - return EmbeddingsResourceWithStreamingResponse(self) - - def create( - self, - *, - input: Union[str, List[str]], - model: str, - user: str | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> EmbeddingCreateResponse: - """ - Creates an embedding vector representing the input text. - - Args: - input: Input text to embed, encoded as a string or array of tokens. To embed multiple - inputs in a single request, pass an array of strings. - - model: ID of the model to use. You can use the List models API to see all of your - available models. - - user: A unique identifier representing your end-user, which can help DigitalOcean to - monitor and detect abuse. - - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return self._post( - "/embeddings", - body=maybe_transform( - { - "input": input, - "model": model, - "user": user, - }, - embedding_create_params.EmbeddingCreateParams, - ), - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=EmbeddingCreateResponse, - ) - - -class AsyncEmbeddingsResource(AsyncAPIResource): - @cached_property - def with_raw_response(self) -> AsyncEmbeddingsResourceWithRawResponse: - """ - This property can be used as a prefix for any HTTP method call to return - the raw response object instead of the parsed content. - - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers - """ - return AsyncEmbeddingsResourceWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> AsyncEmbeddingsResourceWithStreamingResponse: - """ - An alternative to `.with_raw_response` that doesn't eagerly read the response body. - - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response - """ - return AsyncEmbeddingsResourceWithStreamingResponse(self) - - async def create( - self, - *, - input: Union[str, List[str]], - model: str, - user: str | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> EmbeddingCreateResponse: - """ - Creates an embedding vector representing the input text. - - Args: - input: Input text to embed, encoded as a string or array of tokens. To embed multiple - inputs in a single request, pass an array of strings. - - model: ID of the model to use. You can use the List models API to see all of your - available models. 
- - user: A unique identifier representing your end-user, which can help DigitalOcean to - monitor and detect abuse. - - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return await self._post( - "/embeddings", - body=await async_maybe_transform( - { - "input": input, - "model": model, - "user": user, - }, - embedding_create_params.EmbeddingCreateParams, - ), - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=EmbeddingCreateResponse, - ) - - -class EmbeddingsResourceWithRawResponse: - def __init__(self, embeddings: EmbeddingsResource) -> None: - self._embeddings = embeddings - - self.create = to_raw_response_wrapper( - embeddings.create, - ) - - -class AsyncEmbeddingsResourceWithRawResponse: - def __init__(self, embeddings: AsyncEmbeddingsResource) -> None: - self._embeddings = embeddings - - self.create = async_to_raw_response_wrapper( - embeddings.create, - ) - - -class EmbeddingsResourceWithStreamingResponse: - def __init__(self, embeddings: EmbeddingsResource) -> None: - self._embeddings = embeddings - - self.create = to_streamed_response_wrapper( - embeddings.create, - ) - - -class AsyncEmbeddingsResourceWithStreamingResponse: - def __init__(self, embeddings: AsyncEmbeddingsResource) -> None: - self._embeddings = embeddings - - self.create = async_to_streamed_response_wrapper( - embeddings.create, - ) diff --git a/src/digitalocean_genai_sdk/resources/regions.py b/src/digitalocean_genai_sdk/resources/regions.py deleted file mode 100644 index d506688b..00000000 --- a/src/digitalocean_genai_sdk/resources/regions.py +++ /dev/null @@ -1,191 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -import httpx - -from ..types import region_list_params -from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven -from .._utils import maybe_transform, async_maybe_transform -from .._compat import cached_property -from .._resource import SyncAPIResource, AsyncAPIResource -from .._response import ( - to_raw_response_wrapper, - to_streamed_response_wrapper, - async_to_raw_response_wrapper, - async_to_streamed_response_wrapper, -) -from .._base_client import make_request_options -from ..types.region_list_response import RegionListResponse - -__all__ = ["RegionsResource", "AsyncRegionsResource"] - - -class RegionsResource(SyncAPIResource): - @cached_property - def with_raw_response(self) -> RegionsResourceWithRawResponse: - """ - This property can be used as a prefix for any HTTP method call to return - the raw response object instead of the parsed content. - - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers - """ - return RegionsResourceWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> RegionsResourceWithStreamingResponse: - """ - An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
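
The embeddings resource deleted above fully specifies its request and response shapes (see also the removed `EmbeddingCreateParams` and `EmbeddingCreateResponse` later in this diff). A small reference sketch of those shapes as plain data, rather than as a call through the renamed client:

```python
# Request body per the removed EmbeddingCreateParams: required `input`
# (a string or list of strings) and `model`, optional `user`.
request_body = {
    "input": ["The food was delicious.", "The service was slow."],
    "model": "example-embedding-model",  # hypothetical model ID; see the List models API
    "user": "user-1234",
}

# Response shape per the removed EmbeddingCreateResponse:
# {
#   "object": "list",
#   "model": "...",
#   "data": [{"object": "embedding", "index": 0, "embedding": [0.01, ...]}, ...],
#   "usage": {"prompt_tokens": 12, "total_tokens": 12},
# }
```
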
- - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response - """ - return RegionsResourceWithStreamingResponse(self) - - def list( - self, - *, - serves_batch: bool | NotGiven = NOT_GIVEN, - serves_inference: bool | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> RegionListResponse: - """ - To list all datacenter regions, send a GET request to `/v2/gen-ai/regions`. - - Args: - serves_batch: include datacenters that are capable of running batch jobs. - - serves_inference: include datacenters that serve inference. - - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return self._get( - "/v2/genai/regions", - options=make_request_options( - extra_headers=extra_headers, - extra_query=extra_query, - extra_body=extra_body, - timeout=timeout, - query=maybe_transform( - { - "serves_batch": serves_batch, - "serves_inference": serves_inference, - }, - region_list_params.RegionListParams, - ), - ), - cast_to=RegionListResponse, - ) - - -class AsyncRegionsResource(AsyncAPIResource): - @cached_property - def with_raw_response(self) -> AsyncRegionsResourceWithRawResponse: - """ - This property can be used as a prefix for any HTTP method call to return - the raw response object instead of the parsed content. - - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers - """ - return AsyncRegionsResourceWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> AsyncRegionsResourceWithStreamingResponse: - """ - An alternative to `.with_raw_response` that doesn't eagerly read the response body. - - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response - """ - return AsyncRegionsResourceWithStreamingResponse(self) - - async def list( - self, - *, - serves_batch: bool | NotGiven = NOT_GIVEN, - serves_inference: bool | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> RegionListResponse: - """ - To list all datacenter regions, send a GET request to `/v2/gen-ai/regions`. - - Args: - serves_batch: include datacenters that are capable of running batch jobs. - - serves_inference: include datacenters that serve inference. 
- - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return await self._get( - "/v2/genai/regions", - options=make_request_options( - extra_headers=extra_headers, - extra_query=extra_query, - extra_body=extra_body, - timeout=timeout, - query=await async_maybe_transform( - { - "serves_batch": serves_batch, - "serves_inference": serves_inference, - }, - region_list_params.RegionListParams, - ), - ), - cast_to=RegionListResponse, - ) - - -class RegionsResourceWithRawResponse: - def __init__(self, regions: RegionsResource) -> None: - self._regions = regions - - self.list = to_raw_response_wrapper( - regions.list, - ) - - -class AsyncRegionsResourceWithRawResponse: - def __init__(self, regions: AsyncRegionsResource) -> None: - self._regions = regions - - self.list = async_to_raw_response_wrapper( - regions.list, - ) - - -class RegionsResourceWithStreamingResponse: - def __init__(self, regions: RegionsResource) -> None: - self._regions = regions - - self.list = to_streamed_response_wrapper( - regions.list, - ) - - -class AsyncRegionsResourceWithStreamingResponse: - def __init__(self, regions: AsyncRegionsResource) -> None: - self._regions = regions - - self.list = async_to_streamed_response_wrapper( - regions.list, - ) diff --git a/src/digitalocean_genai_sdk/types/auth/agents/__init__.py b/src/digitalocean_genai_sdk/types/auth/agents/__init__.py deleted file mode 100644 index 9fae55b6..00000000 --- a/src/digitalocean_genai_sdk/types/auth/agents/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from .token_create_params import TokenCreateParams as TokenCreateParams -from .token_create_response import TokenCreateResponse as TokenCreateResponse diff --git a/src/digitalocean_genai_sdk/types/auth/agents/token_create_params.py b/src/digitalocean_genai_sdk/types/auth/agents/token_create_params.py deleted file mode 100644 index 0df640f9..00000000 --- a/src/digitalocean_genai_sdk/types/auth/agents/token_create_params.py +++ /dev/null @@ -1,13 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing_extensions import Annotated, TypedDict - -from ...._utils import PropertyInfo - -__all__ = ["TokenCreateParams"] - - -class TokenCreateParams(TypedDict, total=False): - body_agent_uuid: Annotated[str, PropertyInfo(alias="agent_uuid")] diff --git a/src/digitalocean_genai_sdk/types/auth/agents/token_create_response.py b/src/digitalocean_genai_sdk/types/auth/agents/token_create_response.py deleted file mode 100644 index e58b7399..00000000 --- a/src/digitalocean_genai_sdk/types/auth/agents/token_create_response.py +++ /dev/null @@ -1,13 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
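
The deleted `TokenCreateParams` above illustrates the SDK's alias mechanism: the Python-side key `body_agent_uuid` is annotated with `PropertyInfo(alias="agent_uuid")`, and `maybe_transform` renames it to the wire format before the request is sent. A minimal sketch of that behaviour, assuming `PropertyInfo` and `maybe_transform` remain importable from `gradientai._utils` as the `from ...._utils import ...` lines elsewhere in this diff suggest:

```python
from typing_extensions import Annotated, TypedDict

# Import path mirrors the internal `_utils` imports in the generated resources;
# treat it as an assumption outside this diff.
from gradientai._utils import PropertyInfo, maybe_transform


class ExampleTokenParams(TypedDict, total=False):
    # hypothetical stand-in for the deleted TokenCreateParams
    body_agent_uuid: Annotated[str, PropertyInfo(alias="agent_uuid")]


# maybe_transform applies the alias, so the request body carries `agent_uuid`
# rather than the Python-side `body_agent_uuid` key.
print(maybe_transform({"body_agent_uuid": "123"}, ExampleTokenParams))
# expected: {'agent_uuid': '123'}
```
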
- -from typing import Optional - -from ...._models import BaseModel - -__all__ = ["TokenCreateResponse"] - - -class TokenCreateResponse(BaseModel): - access_token: Optional[str] = None - - refresh_token: Optional[str] = None diff --git a/src/digitalocean_genai_sdk/types/chat_completion_request_message_content_part_text_param.py b/src/digitalocean_genai_sdk/types/chat_completion_request_message_content_part_text_param.py deleted file mode 100644 index 4aec9488..00000000 --- a/src/digitalocean_genai_sdk/types/chat_completion_request_message_content_part_text_param.py +++ /dev/null @@ -1,15 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing_extensions import Literal, Required, TypedDict - -__all__ = ["ChatCompletionRequestMessageContentPartTextParam"] - - -class ChatCompletionRequestMessageContentPartTextParam(TypedDict, total=False): - text: Required[str] - """The text content.""" - - type: Required[Literal["text"]] - """The type of the content part.""" diff --git a/src/digitalocean_genai_sdk/types/embedding_create_params.py b/src/digitalocean_genai_sdk/types/embedding_create_params.py deleted file mode 100644 index d3e923ad..00000000 --- a/src/digitalocean_genai_sdk/types/embedding_create_params.py +++ /dev/null @@ -1,28 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import List, Union -from typing_extensions import Required, TypedDict - -__all__ = ["EmbeddingCreateParams"] - - -class EmbeddingCreateParams(TypedDict, total=False): - input: Required[Union[str, List[str]]] - """Input text to embed, encoded as a string or array of tokens. - - To embed multiple inputs in a single request, pass an array of strings. - """ - - model: Required[str] - """ID of the model to use. - - You can use the List models API to see all of your available models. - """ - - user: str - """ - A unique identifier representing your end-user, which can help DigitalOcean to - monitor and detect abuse. - """ diff --git a/src/digitalocean_genai_sdk/types/embedding_create_response.py b/src/digitalocean_genai_sdk/types/embedding_create_response.py deleted file mode 100644 index 19c474fd..00000000 --- a/src/digitalocean_genai_sdk/types/embedding_create_response.py +++ /dev/null @@ -1,41 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -from typing import List -from typing_extensions import Literal - -from .._models import BaseModel - -__all__ = ["EmbeddingCreateResponse", "Data", "Usage"] - - -class Data(BaseModel): - embedding: List[float] - """The embedding vector, which is a list of floats.""" - - index: int - """The index of the embedding in the list of embeddings.""" - - object: Literal["embedding"] - """The object type, which is always "embedding".""" - - -class Usage(BaseModel): - prompt_tokens: int - """The number of tokens used by the prompt.""" - - total_tokens: int - """The total number of tokens used by the request.""" - - -class EmbeddingCreateResponse(BaseModel): - data: List[Data] - """The list of embeddings generated by the model.""" - - model: str - """The name of the model used to generate the embedding.""" - - object: Literal["list"] - """The object type, which is always "list".""" - - usage: Usage - """The usage information for the request.""" diff --git a/src/digitalocean_genai_sdk/__init__.py b/src/gradientai/__init__.py similarity index 84% rename from src/digitalocean_genai_sdk/__init__.py rename to src/gradientai/__init__.py index fc240d83..3316fe47 100644 --- a/src/digitalocean_genai_sdk/__init__.py +++ b/src/gradientai/__init__.py @@ -10,11 +10,11 @@ Stream, Timeout, Transport, + GradientAI, AsyncClient, AsyncStream, RequestOptions, - DigitaloceanGenaiSDK, - AsyncDigitaloceanGenaiSDK, + AsyncGradientAI, ) from ._models import BaseModel from ._version import __title__, __version__ @@ -28,15 +28,15 @@ RateLimitError, APITimeoutError, BadRequestError, + GradientAIError, APIConnectionError, AuthenticationError, InternalServerError, PermissionDeniedError, UnprocessableEntityError, - DigitaloceanGenaiSDKError, APIResponseValidationError, ) -from ._base_client import DefaultHttpxClient, DefaultAsyncHttpxClient +from ._base_client import DefaultHttpxClient, DefaultAioHttpClient, DefaultAsyncHttpxClient from ._utils._logs import setup_logging as _setup_logging __all__ = [ @@ -49,7 +49,7 @@ "NotGiven", "NOT_GIVEN", "Omit", - "DigitaloceanGenaiSDKError", + "GradientAIError", "APIError", "APIStatusError", "APITimeoutError", @@ -69,8 +69,8 @@ "AsyncClient", "Stream", "AsyncStream", - "DigitaloceanGenaiSDK", - "AsyncDigitaloceanGenaiSDK", + "GradientAI", + "AsyncGradientAI", "file_from_path", "BaseModel", "DEFAULT_TIMEOUT", @@ -78,6 +78,7 @@ "DEFAULT_CONNECTION_LIMITS", "DefaultHttpxClient", "DefaultAsyncHttpxClient", + "DefaultAioHttpClient", ] if not _t.TYPE_CHECKING: @@ -88,12 +89,12 @@ # Update the __module__ attribute for exported symbols so that # error messages point to this module instead of the module # it was originally defined in, e.g. -# digitalocean_genai_sdk._exceptions.NotFoundError -> digitalocean_genai_sdk.NotFoundError +# gradientai._exceptions.NotFoundError -> gradientai.NotFoundError __locals = locals() for __name in __all__: if not __name.startswith("__"): try: - __locals[__name].__module__ = "digitalocean_genai_sdk" + __locals[__name].__module__ = "gradientai" except (TypeError, AttributeError): # Some of our exported symbols are builtins which we can't set attributes for. 
pass diff --git a/src/digitalocean_genai_sdk/_base_client.py b/src/gradientai/_base_client.py similarity index 97% rename from src/digitalocean_genai_sdk/_base_client.py rename to src/gradientai/_base_client.py index 73cd30fc..6dce600b 100644 --- a/src/digitalocean_genai_sdk/_base_client.py +++ b/src/gradientai/_base_client.py @@ -389,7 +389,7 @@ def __init__( if max_retries is None: # pyright: ignore[reportUnnecessaryComparison] raise TypeError( - "max_retries cannot be None. If you want to disable retries, pass `0`; if you want unlimited retries, pass `math.inf` or a very high number; if you want the default behavior, pass `digitalocean_genai_sdk.DEFAULT_MAX_RETRIES`" + "max_retries cannot be None. If you want to disable retries, pass `0`; if you want unlimited retries, pass `math.inf` or a very high number; if you want the default behavior, pass `gradientai.DEFAULT_MAX_RETRIES`" ) def _enforce_trailing_slash(self, url: URL) -> URL: @@ -1071,7 +1071,14 @@ def _process_response( ) -> ResponseT: origin = get_origin(cast_to) or cast_to - if inspect.isclass(origin) and issubclass(origin, BaseAPIResponse): + if ( + inspect.isclass(origin) + and issubclass(origin, BaseAPIResponse) + # we only want to actually return the custom BaseAPIResponse class if we're + # returning the raw response, or if we're not streaming SSE, as if we're streaming + # SSE then `cast_to` doesn't actively reflect the type we need to parse into + and (not stream or bool(response.request.headers.get(RAW_RESPONSE_HEADER))) + ): if not issubclass(origin, APIResponse): raise TypeError(f"API Response types must subclass {APIResponse}; Received {origin}") @@ -1282,6 +1289,24 @@ def __init__(self, **kwargs: Any) -> None: super().__init__(**kwargs) +try: + import httpx_aiohttp +except ImportError: + + class _DefaultAioHttpClient(httpx.AsyncClient): + def __init__(self, **_kwargs: Any) -> None: + raise RuntimeError("To use the aiohttp client you must have installed the package with the `aiohttp` extra") +else: + + class _DefaultAioHttpClient(httpx_aiohttp.HttpxAiohttpClient): # type: ignore + def __init__(self, **kwargs: Any) -> None: + kwargs.setdefault("timeout", DEFAULT_TIMEOUT) + kwargs.setdefault("limits", DEFAULT_CONNECTION_LIMITS) + kwargs.setdefault("follow_redirects", True) + + super().__init__(**kwargs) + + if TYPE_CHECKING: DefaultAsyncHttpxClient = httpx.AsyncClient """An alias to `httpx.AsyncClient` that provides the same defaults that this SDK @@ -1290,8 +1315,12 @@ def __init__(self, **kwargs: Any) -> None: This is useful because overriding the `http_client` with your own instance of `httpx.AsyncClient` will result in httpx's defaults being used, not ours. 
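
The `_base_client.py` changes above add an optional aiohttp transport: `DefaultAioHttpClient` wraps `httpx_aiohttp.HttpxAiohttpClient` with the SDK's default timeout, connection limits, and redirect handling, and raises at construction time if the `aiohttp` extra is missing. A hedged sketch of opting into it, assuming the async client constructor accepts an `http_client` argument as the `copy()` signatures in this diff imply:

```python
# Sketch only: requires installing the package with the `aiohttp` extra (per the
# RuntimeError message above) and assumes GRADIENTAI_API_KEY is set.
import asyncio

from gradientai import AsyncGradientAI, DefaultAioHttpClient


async def main() -> None:
    # Passing `http_client` here is inferred from the copy() signature in this
    # diff; verify against the released client before relying on it.
    client = AsyncGradientAI(http_client=DefaultAioHttpClient())
    ...  # requests now run over the aiohttp transport


asyncio.run(main())
```
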
""" + + DefaultAioHttpClient = httpx.AsyncClient + """An alias to `httpx.AsyncClient` that changes the default HTTP transport to `aiohttp`.""" else: DefaultAsyncHttpxClient = _DefaultAsyncHttpxClient + DefaultAioHttpClient = _DefaultAioHttpClient class AsyncHttpxClientWrapper(DefaultAsyncHttpxClient): @@ -1574,7 +1603,14 @@ async def _process_response( ) -> ResponseT: origin = get_origin(cast_to) or cast_to - if inspect.isclass(origin) and issubclass(origin, BaseAPIResponse): + if ( + inspect.isclass(origin) + and issubclass(origin, BaseAPIResponse) + # we only want to actually return the custom BaseAPIResponse class if we're + # returning the raw response, or if we're not streaming SSE, as if we're streaming + # SSE then `cast_to` doesn't actively reflect the type we need to parse into + and (not stream or bool(response.request.headers.get(RAW_RESPONSE_HEADER))) + ): if not issubclass(origin, AsyncAPIResponse): raise TypeError(f"API Response types must subclass {AsyncAPIResponse}; Received {origin}") diff --git a/src/digitalocean_genai_sdk/_client.py b/src/gradientai/_client.py similarity index 77% rename from src/digitalocean_genai_sdk/_client.py rename to src/gradientai/_client.py index 2f86bb7d..71db35bc 100644 --- a/src/digitalocean_genai_sdk/_client.py +++ b/src/gradientai/_client.py @@ -23,7 +23,7 @@ from ._compat import cached_property from ._version import __version__ from ._streaming import Stream as Stream, AsyncStream as AsyncStream -from ._exceptions import APIStatusError, DigitaloceanGenaiSDKError +from ._exceptions import APIStatusError, GradientAIError from ._base_client import ( DEFAULT_MAX_RETRIES, SyncAPIClient, @@ -31,26 +31,13 @@ ) if TYPE_CHECKING: - from .resources import ( - auth, - chat, - agents, - models, - regions, - api_keys, - providers, - embeddings, - indexing_jobs, - knowledge_bases, - ) - from .resources.chat import ChatResource, AsyncChatResource + from .resources import chat, agents, models, regions, inference, providers, indexing_jobs, knowledge_bases from .resources.models import ModelsResource, AsyncModelsResource - from .resources.regions import RegionsResource, AsyncRegionsResource - from .resources.auth.auth import AuthResource, AsyncAuthResource - from .resources.embeddings import EmbeddingsResource, AsyncEmbeddingsResource + from .resources.chat.chat import ChatResource, AsyncChatResource from .resources.agents.agents import AgentsResource, AsyncAgentsResource from .resources.indexing_jobs import IndexingJobsResource, AsyncIndexingJobsResource - from .resources.api_keys.api_keys import APIKeysResource, AsyncAPIKeysResource + from .resources.regions.regions import RegionsResource, AsyncRegionsResource + from .resources.inference.inference import InferenceResource, AsyncInferenceResource from .resources.providers.providers import ProvidersResource, AsyncProvidersResource from .resources.knowledge_bases.knowledge_bases import KnowledgeBasesResource, AsyncKnowledgeBasesResource @@ -59,14 +46,14 @@ "Transport", "ProxiesTypes", "RequestOptions", - "DigitaloceanGenaiSDK", - "AsyncDigitaloceanGenaiSDK", + "GradientAI", + "AsyncGradientAI", "Client", "AsyncClient", ] -class DigitaloceanGenaiSDK(SyncAPIClient): +class GradientAI(SyncAPIClient): # client options api_key: str @@ -93,20 +80,21 @@ def __init__( # part of our public interface in the future. _strict_response_validation: bool = False, ) -> None: - """Construct a new synchronous DigitaloceanGenaiSDK client instance. + """Construct a new synchronous GradientAI client instance. 
- This automatically infers the `api_key` argument from the `DIGITALOCEAN_GENAI_SDK_API_KEY` environment variable if it is not provided. + This automatically infers the `api_key` argument from the `GRADIENTAI_API_KEY` environment variable if it is not provided. """ if api_key is None: - api_key = os.environ.get("DIGITALOCEAN_GENAI_SDK_API_KEY") + api_key = os.environ.get("GRADIENTAI_API_KEY") if api_key is None: - raise DigitaloceanGenaiSDKError( - "The api_key client option must be set either by passing api_key to the client or by setting the DIGITALOCEAN_GENAI_SDK_API_KEY environment variable" + raise GradientAIError( + "The api_key client option must be set either by passing api_key to the client or by setting the GRADIENTAI_API_KEY environment variable" ) self.api_key = api_key if base_url is None: - base_url = os.environ.get("DIGITALOCEAN_GENAI_SDK_BASE_URL") + base_url = os.environ.get("GRADIENT_AI_BASE_URL") + self._base_url_overridden = base_url is not None if base_url is None: base_url = f"https://api.digitalocean.com/" @@ -133,12 +121,6 @@ def providers(self) -> ProvidersResource: return ProvidersResource(self) - @cached_property - def auth(self) -> AuthResource: - from .resources.auth import AuthResource - - return AuthResource(self) - @cached_property def regions(self) -> RegionsResource: from .resources.regions import RegionsResource @@ -157,12 +139,6 @@ def knowledge_bases(self) -> KnowledgeBasesResource: return KnowledgeBasesResource(self) - @cached_property - def api_keys(self) -> APIKeysResource: - from .resources.api_keys import APIKeysResource - - return APIKeysResource(self) - @cached_property def chat(self) -> ChatResource: from .resources.chat import ChatResource @@ -170,10 +146,10 @@ def chat(self) -> ChatResource: return ChatResource(self) @cached_property - def embeddings(self) -> EmbeddingsResource: - from .resources.embeddings import EmbeddingsResource + def inference(self) -> InferenceResource: + from .resources.inference import InferenceResource - return EmbeddingsResource(self) + return InferenceResource(self) @cached_property def models(self) -> ModelsResource: @@ -182,12 +158,12 @@ def models(self) -> ModelsResource: return ModelsResource(self) @cached_property - def with_raw_response(self) -> DigitaloceanGenaiSDKWithRawResponse: - return DigitaloceanGenaiSDKWithRawResponse(self) + def with_raw_response(self) -> GradientAIWithRawResponse: + return GradientAIWithRawResponse(self) @cached_property - def with_streaming_response(self) -> DigitaloceanGenaiSDKWithStreamedResponse: - return DigitaloceanGenaiSDKWithStreamedResponse(self) + def with_streaming_response(self) -> GradientAIWithStreamedResponse: + return GradientAIWithStreamedResponse(self) @property @override @@ -245,7 +221,7 @@ def copy( params = set_default_query http_client = http_client or self._client - return self.__class__( + client = self.__class__( api_key=api_key or self.api_key, base_url=base_url or self.base_url, timeout=self.timeout if isinstance(timeout, NotGiven) else timeout, @@ -255,6 +231,8 @@ def copy( default_query=params, **_extra_kwargs, ) + client._base_url_overridden = self._base_url_overridden or base_url is not None + return client # Alias for `copy` for nicer inline usage, e.g. # client.with_options(timeout=10).foo.create(...) 
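
The renamed client reads its credentials from `GRADIENTAI_API_KEY`, takes its base URL override from `GRADIENT_AI_BASE_URL` (defaulting to `https://api.digitalocean.com/`), and documents `with_options` as the alias for `copy`. A short usage sketch tying those pieces together; the `regions.list()` call assumes the relocated regions resource keeps the `list` method shown in the module deleted earlier in this diff:

```python
import os

from gradientai import GradientAI

# Falls back to the GRADIENTAI_API_KEY environment variable when api_key is omitted;
# GRADIENT_AI_BASE_URL overrides the default https://api.digitalocean.com/ base URL.
client = GradientAI(api_key=os.environ.get("GRADIENTAI_API_KEY"))

# Per-request option overrides via the `with_options` alias for `copy`.
regions = client.with_options(timeout=10).regions.list()  # assumed to mirror the removed RegionsResource.list
```
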
@@ -294,7 +272,7 @@ def _make_status_error( return APIStatusError(err_msg, response=response, body=body) -class AsyncDigitaloceanGenaiSDK(AsyncAPIClient): +class AsyncGradientAI(AsyncAPIClient): # client options api_key: str @@ -321,20 +299,21 @@ def __init__( # part of our public interface in the future. _strict_response_validation: bool = False, ) -> None: - """Construct a new async AsyncDigitaloceanGenaiSDK client instance. + """Construct a new async AsyncGradientAI client instance. - This automatically infers the `api_key` argument from the `DIGITALOCEAN_GENAI_SDK_API_KEY` environment variable if it is not provided. + This automatically infers the `api_key` argument from the `GRADIENTAI_API_KEY` environment variable if it is not provided. """ if api_key is None: - api_key = os.environ.get("DIGITALOCEAN_GENAI_SDK_API_KEY") + api_key = os.environ.get("GRADIENTAI_API_KEY") if api_key is None: - raise DigitaloceanGenaiSDKError( - "The api_key client option must be set either by passing api_key to the client or by setting the DIGITALOCEAN_GENAI_SDK_API_KEY environment variable" + raise GradientAIError( + "The api_key client option must be set either by passing api_key to the client or by setting the GRADIENTAI_API_KEY environment variable" ) self.api_key = api_key if base_url is None: - base_url = os.environ.get("DIGITALOCEAN_GENAI_SDK_BASE_URL") + base_url = os.environ.get("GRADIENT_AI_BASE_URL") + self._base_url_overridden = base_url is not None if base_url is None: base_url = f"https://api.digitalocean.com/" @@ -361,12 +340,6 @@ def providers(self) -> AsyncProvidersResource: return AsyncProvidersResource(self) - @cached_property - def auth(self) -> AsyncAuthResource: - from .resources.auth import AsyncAuthResource - - return AsyncAuthResource(self) - @cached_property def regions(self) -> AsyncRegionsResource: from .resources.regions import AsyncRegionsResource @@ -385,12 +358,6 @@ def knowledge_bases(self) -> AsyncKnowledgeBasesResource: return AsyncKnowledgeBasesResource(self) - @cached_property - def api_keys(self) -> AsyncAPIKeysResource: - from .resources.api_keys import AsyncAPIKeysResource - - return AsyncAPIKeysResource(self) - @cached_property def chat(self) -> AsyncChatResource: from .resources.chat import AsyncChatResource @@ -398,10 +365,10 @@ def chat(self) -> AsyncChatResource: return AsyncChatResource(self) @cached_property - def embeddings(self) -> AsyncEmbeddingsResource: - from .resources.embeddings import AsyncEmbeddingsResource + def inference(self) -> AsyncInferenceResource: + from .resources.inference import AsyncInferenceResource - return AsyncEmbeddingsResource(self) + return AsyncInferenceResource(self) @cached_property def models(self) -> AsyncModelsResource: @@ -410,12 +377,12 @@ def models(self) -> AsyncModelsResource: return AsyncModelsResource(self) @cached_property - def with_raw_response(self) -> AsyncDigitaloceanGenaiSDKWithRawResponse: - return AsyncDigitaloceanGenaiSDKWithRawResponse(self) + def with_raw_response(self) -> AsyncGradientAIWithRawResponse: + return AsyncGradientAIWithRawResponse(self) @cached_property - def with_streaming_response(self) -> AsyncDigitaloceanGenaiSDKWithStreamedResponse: - return AsyncDigitaloceanGenaiSDKWithStreamedResponse(self) + def with_streaming_response(self) -> AsyncGradientAIWithStreamedResponse: + return AsyncGradientAIWithStreamedResponse(self) @property @override @@ -473,7 +440,7 @@ def copy( params = set_default_query http_client = http_client or self._client - return self.__class__( + client = self.__class__( 
api_key=api_key or self.api_key, base_url=base_url or self.base_url, timeout=self.timeout if isinstance(timeout, NotGiven) else timeout, @@ -483,6 +450,8 @@ def copy( default_query=params, **_extra_kwargs, ) + client._base_url_overridden = self._base_url_overridden or base_url is not None + return client # Alias for `copy` for nicer inline usage, e.g. # client.with_options(timeout=10).foo.create(...) @@ -522,10 +491,10 @@ def _make_status_error( return APIStatusError(err_msg, response=response, body=body) -class DigitaloceanGenaiSDKWithRawResponse: - _client: DigitaloceanGenaiSDK +class GradientAIWithRawResponse: + _client: GradientAI - def __init__(self, client: DigitaloceanGenaiSDK) -> None: + def __init__(self, client: GradientAI) -> None: self._client = client @cached_property @@ -540,12 +509,6 @@ def providers(self) -> providers.ProvidersResourceWithRawResponse: return ProvidersResourceWithRawResponse(self._client.providers) - @cached_property - def auth(self) -> auth.AuthResourceWithRawResponse: - from .resources.auth import AuthResourceWithRawResponse - - return AuthResourceWithRawResponse(self._client.auth) - @cached_property def regions(self) -> regions.RegionsResourceWithRawResponse: from .resources.regions import RegionsResourceWithRawResponse @@ -564,12 +527,6 @@ def knowledge_bases(self) -> knowledge_bases.KnowledgeBasesResourceWithRawRespon return KnowledgeBasesResourceWithRawResponse(self._client.knowledge_bases) - @cached_property - def api_keys(self) -> api_keys.APIKeysResourceWithRawResponse: - from .resources.api_keys import APIKeysResourceWithRawResponse - - return APIKeysResourceWithRawResponse(self._client.api_keys) - @cached_property def chat(self) -> chat.ChatResourceWithRawResponse: from .resources.chat import ChatResourceWithRawResponse @@ -577,10 +534,10 @@ def chat(self) -> chat.ChatResourceWithRawResponse: return ChatResourceWithRawResponse(self._client.chat) @cached_property - def embeddings(self) -> embeddings.EmbeddingsResourceWithRawResponse: - from .resources.embeddings import EmbeddingsResourceWithRawResponse + def inference(self) -> inference.InferenceResourceWithRawResponse: + from .resources.inference import InferenceResourceWithRawResponse - return EmbeddingsResourceWithRawResponse(self._client.embeddings) + return InferenceResourceWithRawResponse(self._client.inference) @cached_property def models(self) -> models.ModelsResourceWithRawResponse: @@ -589,10 +546,10 @@ def models(self) -> models.ModelsResourceWithRawResponse: return ModelsResourceWithRawResponse(self._client.models) -class AsyncDigitaloceanGenaiSDKWithRawResponse: - _client: AsyncDigitaloceanGenaiSDK +class AsyncGradientAIWithRawResponse: + _client: AsyncGradientAI - def __init__(self, client: AsyncDigitaloceanGenaiSDK) -> None: + def __init__(self, client: AsyncGradientAI) -> None: self._client = client @cached_property @@ -607,12 +564,6 @@ def providers(self) -> providers.AsyncProvidersResourceWithRawResponse: return AsyncProvidersResourceWithRawResponse(self._client.providers) - @cached_property - def auth(self) -> auth.AsyncAuthResourceWithRawResponse: - from .resources.auth import AsyncAuthResourceWithRawResponse - - return AsyncAuthResourceWithRawResponse(self._client.auth) - @cached_property def regions(self) -> regions.AsyncRegionsResourceWithRawResponse: from .resources.regions import AsyncRegionsResourceWithRawResponse @@ -631,12 +582,6 @@ def knowledge_bases(self) -> knowledge_bases.AsyncKnowledgeBasesResourceWithRawR return 
AsyncKnowledgeBasesResourceWithRawResponse(self._client.knowledge_bases) - @cached_property - def api_keys(self) -> api_keys.AsyncAPIKeysResourceWithRawResponse: - from .resources.api_keys import AsyncAPIKeysResourceWithRawResponse - - return AsyncAPIKeysResourceWithRawResponse(self._client.api_keys) - @cached_property def chat(self) -> chat.AsyncChatResourceWithRawResponse: from .resources.chat import AsyncChatResourceWithRawResponse @@ -644,10 +589,10 @@ def chat(self) -> chat.AsyncChatResourceWithRawResponse: return AsyncChatResourceWithRawResponse(self._client.chat) @cached_property - def embeddings(self) -> embeddings.AsyncEmbeddingsResourceWithRawResponse: - from .resources.embeddings import AsyncEmbeddingsResourceWithRawResponse + def inference(self) -> inference.AsyncInferenceResourceWithRawResponse: + from .resources.inference import AsyncInferenceResourceWithRawResponse - return AsyncEmbeddingsResourceWithRawResponse(self._client.embeddings) + return AsyncInferenceResourceWithRawResponse(self._client.inference) @cached_property def models(self) -> models.AsyncModelsResourceWithRawResponse: @@ -656,10 +601,10 @@ def models(self) -> models.AsyncModelsResourceWithRawResponse: return AsyncModelsResourceWithRawResponse(self._client.models) -class DigitaloceanGenaiSDKWithStreamedResponse: - _client: DigitaloceanGenaiSDK +class GradientAIWithStreamedResponse: + _client: GradientAI - def __init__(self, client: DigitaloceanGenaiSDK) -> None: + def __init__(self, client: GradientAI) -> None: self._client = client @cached_property @@ -674,12 +619,6 @@ def providers(self) -> providers.ProvidersResourceWithStreamingResponse: return ProvidersResourceWithStreamingResponse(self._client.providers) - @cached_property - def auth(self) -> auth.AuthResourceWithStreamingResponse: - from .resources.auth import AuthResourceWithStreamingResponse - - return AuthResourceWithStreamingResponse(self._client.auth) - @cached_property def regions(self) -> regions.RegionsResourceWithStreamingResponse: from .resources.regions import RegionsResourceWithStreamingResponse @@ -698,12 +637,6 @@ def knowledge_bases(self) -> knowledge_bases.KnowledgeBasesResourceWithStreaming return KnowledgeBasesResourceWithStreamingResponse(self._client.knowledge_bases) - @cached_property - def api_keys(self) -> api_keys.APIKeysResourceWithStreamingResponse: - from .resources.api_keys import APIKeysResourceWithStreamingResponse - - return APIKeysResourceWithStreamingResponse(self._client.api_keys) - @cached_property def chat(self) -> chat.ChatResourceWithStreamingResponse: from .resources.chat import ChatResourceWithStreamingResponse @@ -711,10 +644,10 @@ def chat(self) -> chat.ChatResourceWithStreamingResponse: return ChatResourceWithStreamingResponse(self._client.chat) @cached_property - def embeddings(self) -> embeddings.EmbeddingsResourceWithStreamingResponse: - from .resources.embeddings import EmbeddingsResourceWithStreamingResponse + def inference(self) -> inference.InferenceResourceWithStreamingResponse: + from .resources.inference import InferenceResourceWithStreamingResponse - return EmbeddingsResourceWithStreamingResponse(self._client.embeddings) + return InferenceResourceWithStreamingResponse(self._client.inference) @cached_property def models(self) -> models.ModelsResourceWithStreamingResponse: @@ -723,10 +656,10 @@ def models(self) -> models.ModelsResourceWithStreamingResponse: return ModelsResourceWithStreamingResponse(self._client.models) -class AsyncDigitaloceanGenaiSDKWithStreamedResponse: - _client: 
AsyncDigitaloceanGenaiSDK +class AsyncGradientAIWithStreamedResponse: + _client: AsyncGradientAI - def __init__(self, client: AsyncDigitaloceanGenaiSDK) -> None: + def __init__(self, client: AsyncGradientAI) -> None: self._client = client @cached_property @@ -741,12 +674,6 @@ def providers(self) -> providers.AsyncProvidersResourceWithStreamingResponse: return AsyncProvidersResourceWithStreamingResponse(self._client.providers) - @cached_property - def auth(self) -> auth.AsyncAuthResourceWithStreamingResponse: - from .resources.auth import AsyncAuthResourceWithStreamingResponse - - return AsyncAuthResourceWithStreamingResponse(self._client.auth) - @cached_property def regions(self) -> regions.AsyncRegionsResourceWithStreamingResponse: from .resources.regions import AsyncRegionsResourceWithStreamingResponse @@ -765,12 +692,6 @@ def knowledge_bases(self) -> knowledge_bases.AsyncKnowledgeBasesResourceWithStre return AsyncKnowledgeBasesResourceWithStreamingResponse(self._client.knowledge_bases) - @cached_property - def api_keys(self) -> api_keys.AsyncAPIKeysResourceWithStreamingResponse: - from .resources.api_keys import AsyncAPIKeysResourceWithStreamingResponse - - return AsyncAPIKeysResourceWithStreamingResponse(self._client.api_keys) - @cached_property def chat(self) -> chat.AsyncChatResourceWithStreamingResponse: from .resources.chat import AsyncChatResourceWithStreamingResponse @@ -778,10 +699,10 @@ def chat(self) -> chat.AsyncChatResourceWithStreamingResponse: return AsyncChatResourceWithStreamingResponse(self._client.chat) @cached_property - def embeddings(self) -> embeddings.AsyncEmbeddingsResourceWithStreamingResponse: - from .resources.embeddings import AsyncEmbeddingsResourceWithStreamingResponse + def inference(self) -> inference.AsyncInferenceResourceWithStreamingResponse: + from .resources.inference import AsyncInferenceResourceWithStreamingResponse - return AsyncEmbeddingsResourceWithStreamingResponse(self._client.embeddings) + return AsyncInferenceResourceWithStreamingResponse(self._client.inference) @cached_property def models(self) -> models.AsyncModelsResourceWithStreamingResponse: @@ -790,6 +711,6 @@ def models(self) -> models.AsyncModelsResourceWithStreamingResponse: return AsyncModelsResourceWithStreamingResponse(self._client.models) -Client = DigitaloceanGenaiSDK +Client = GradientAI -AsyncClient = AsyncDigitaloceanGenaiSDK +AsyncClient = AsyncGradientAI diff --git a/src/digitalocean_genai_sdk/_compat.py b/src/gradientai/_compat.py similarity index 100% rename from src/digitalocean_genai_sdk/_compat.py rename to src/gradientai/_compat.py diff --git a/src/digitalocean_genai_sdk/_constants.py b/src/gradientai/_constants.py similarity index 100% rename from src/digitalocean_genai_sdk/_constants.py rename to src/gradientai/_constants.py diff --git a/src/digitalocean_genai_sdk/_exceptions.py b/src/gradientai/_exceptions.py similarity index 97% rename from src/digitalocean_genai_sdk/_exceptions.py rename to src/gradientai/_exceptions.py index 755e166e..759c8d86 100644 --- a/src/digitalocean_genai_sdk/_exceptions.py +++ b/src/gradientai/_exceptions.py @@ -18,11 +18,11 @@ ] -class DigitaloceanGenaiSDKError(Exception): +class GradientAIError(Exception): pass -class APIError(DigitaloceanGenaiSDKError): +class APIError(GradientAIError): message: str request: httpx.Request diff --git a/src/digitalocean_genai_sdk/_files.py b/src/gradientai/_files.py similarity index 100% rename from src/digitalocean_genai_sdk/_files.py rename to src/gradientai/_files.py diff --git 
a/src/digitalocean_genai_sdk/_models.py b/src/gradientai/_models.py similarity index 100% rename from src/digitalocean_genai_sdk/_models.py rename to src/gradientai/_models.py diff --git a/src/digitalocean_genai_sdk/_qs.py b/src/gradientai/_qs.py similarity index 100% rename from src/digitalocean_genai_sdk/_qs.py rename to src/gradientai/_qs.py diff --git a/src/digitalocean_genai_sdk/_resource.py b/src/gradientai/_resource.py similarity index 76% rename from src/digitalocean_genai_sdk/_resource.py rename to src/gradientai/_resource.py index fe43ec28..9182ee0b 100644 --- a/src/digitalocean_genai_sdk/_resource.py +++ b/src/gradientai/_resource.py @@ -8,13 +8,13 @@ import anyio if TYPE_CHECKING: - from ._client import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK + from ._client import GradientAI, AsyncGradientAI class SyncAPIResource: - _client: DigitaloceanGenaiSDK + _client: GradientAI - def __init__(self, client: DigitaloceanGenaiSDK) -> None: + def __init__(self, client: GradientAI) -> None: self._client = client self._get = client.get self._post = client.post @@ -28,9 +28,9 @@ def _sleep(self, seconds: float) -> None: class AsyncAPIResource: - _client: AsyncDigitaloceanGenaiSDK + _client: AsyncGradientAI - def __init__(self, client: AsyncDigitaloceanGenaiSDK) -> None: + def __init__(self, client: AsyncGradientAI) -> None: self._client = client self._get = client.get self._post = client.post diff --git a/src/digitalocean_genai_sdk/_response.py b/src/gradientai/_response.py similarity index 98% rename from src/digitalocean_genai_sdk/_response.py rename to src/gradientai/_response.py index 7f1fff1d..2037e4ca 100644 --- a/src/digitalocean_genai_sdk/_response.py +++ b/src/gradientai/_response.py @@ -29,7 +29,7 @@ from ._models import BaseModel, is_basemodel from ._constants import RAW_RESPONSE_HEADER, OVERRIDE_CAST_TO_HEADER from ._streaming import Stream, AsyncStream, is_stream_class_type, extract_stream_chunk_type -from ._exceptions import DigitaloceanGenaiSDKError, APIResponseValidationError +from ._exceptions import GradientAIError, APIResponseValidationError if TYPE_CHECKING: from ._models import FinalRequestOptions @@ -218,7 +218,7 @@ def _parse(self, *, to: type[_T] | None = None) -> R | _T: and issubclass(origin, pydantic.BaseModel) ): raise TypeError( - "Pydantic models must subclass our base model type, e.g. `from digitalocean_genai_sdk import BaseModel`" + "Pydantic models must subclass our base model type, e.g. `from gradientai import BaseModel`" ) if ( @@ -285,7 +285,7 @@ def parse(self, *, to: type[_T] | None = None) -> R | _T: the `to` argument, e.g. ```py - from digitalocean_genai_sdk import BaseModel + from gradientai import BaseModel class MyModel(BaseModel): @@ -387,7 +387,7 @@ async def parse(self, *, to: type[_T] | None = None) -> R | _T: the `to` argument, e.g. ```py - from digitalocean_genai_sdk import BaseModel + from gradientai import BaseModel class MyModel(BaseModel): @@ -558,11 +558,11 @@ async def stream_to_file( class MissingStreamClassError(TypeError): def __init__(self) -> None: super().__init__( - "The `stream` argument was set to `True` but the `stream_cls` argument was not given. See `digitalocean_genai_sdk._streaming` for reference", + "The `stream` argument was set to `True` but the `stream_cls` argument was not given. See `gradientai._streaming` for reference", ) -class StreamAlreadyConsumed(DigitaloceanGenaiSDKError): +class StreamAlreadyConsumed(GradientAIError): """ Attempted to read or stream content, but the content has already been streamed. 
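
The renamed `_response.py` above backs the `.with_raw_response` and `.with_streaming_response` wrappers that appear throughout this diff. A hedged sketch of both access patterns, again assuming the regions resource keeps its `list` method and that the streaming wrapper is used as a context manager as its docstring implies:

```python
from gradientai import GradientAI

client = GradientAI()  # api_key taken from GRADIENTAI_API_KEY

# Raw response: access headers, then `.parse()` into the usual model type.
raw = client.regions.with_raw_response.list()
print(raw.headers)
regions = raw.parse()

# Streaming response: the body is not read eagerly.
with client.regions.with_streaming_response.list() as response:
    print(response.headers)
```
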
diff --git a/src/digitalocean_genai_sdk/_streaming.py b/src/gradientai/_streaming.py similarity index 98% rename from src/digitalocean_genai_sdk/_streaming.py rename to src/gradientai/_streaming.py index 96c3f3d3..bab5eb80 100644 --- a/src/digitalocean_genai_sdk/_streaming.py +++ b/src/gradientai/_streaming.py @@ -12,7 +12,7 @@ from ._utils import extract_type_var_from_base if TYPE_CHECKING: - from ._client import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK + from ._client import GradientAI, AsyncGradientAI _T = TypeVar("_T") @@ -30,7 +30,7 @@ def __init__( *, cast_to: type[_T], response: httpx.Response, - client: DigitaloceanGenaiSDK, + client: GradientAI, ) -> None: self.response = response self._cast_to = cast_to @@ -93,7 +93,7 @@ def __init__( *, cast_to: type[_T], response: httpx.Response, - client: AsyncDigitaloceanGenaiSDK, + client: AsyncGradientAI, ) -> None: self.response = response self._cast_to = cast_to diff --git a/src/digitalocean_genai_sdk/_types.py b/src/gradientai/_types.py similarity index 99% rename from src/digitalocean_genai_sdk/_types.py rename to src/gradientai/_types.py index 3c0d156e..1bac876d 100644 --- a/src/digitalocean_genai_sdk/_types.py +++ b/src/gradientai/_types.py @@ -81,7 +81,7 @@ # This unfortunately means that you will either have # to import this type and pass it explicitly: # -# from digitalocean_genai_sdk import NoneType +# from gradientai import NoneType # client.get('/foo', cast_to=NoneType) # # or build it yourself: diff --git a/src/digitalocean_genai_sdk/_utils/__init__.py b/src/gradientai/_utils/__init__.py similarity index 100% rename from src/digitalocean_genai_sdk/_utils/__init__.py rename to src/gradientai/_utils/__init__.py diff --git a/src/digitalocean_genai_sdk/_utils/_logs.py b/src/gradientai/_utils/_logs.py similarity index 67% rename from src/digitalocean_genai_sdk/_utils/_logs.py rename to src/gradientai/_utils/_logs.py index e0c1fee5..9047e5c8 100644 --- a/src/digitalocean_genai_sdk/_utils/_logs.py +++ b/src/gradientai/_utils/_logs.py @@ -1,12 +1,12 @@ import os import logging -logger: logging.Logger = logging.getLogger("digitalocean_genai_sdk") +logger: logging.Logger = logging.getLogger("gradientai") httpx_logger: logging.Logger = logging.getLogger("httpx") def _basic_config() -> None: - # e.g. [2023-10-05 14:12:26 - digitalocean_genai_sdk._base_client:818 - DEBUG] HTTP Request: POST http://127.0.0.1:4010/foo/bar "200 OK" + # e.g. 
[2023-10-05 14:12:26 - gradientai._base_client:818 - DEBUG] HTTP Request: POST http://127.0.0.1:4010/foo/bar "200 OK" logging.basicConfig( format="[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s", datefmt="%Y-%m-%d %H:%M:%S", @@ -14,7 +14,7 @@ def _basic_config() -> None: def setup_logging() -> None: - env = os.environ.get("DIGITALOCEAN_GENAI_SDK_LOG") + env = os.environ.get("GRADIENT_AI_LOG") if env == "debug": _basic_config() logger.setLevel(logging.DEBUG) diff --git a/src/digitalocean_genai_sdk/_utils/_proxy.py b/src/gradientai/_utils/_proxy.py similarity index 100% rename from src/digitalocean_genai_sdk/_utils/_proxy.py rename to src/gradientai/_utils/_proxy.py diff --git a/src/digitalocean_genai_sdk/_utils/_reflection.py b/src/gradientai/_utils/_reflection.py similarity index 100% rename from src/digitalocean_genai_sdk/_utils/_reflection.py rename to src/gradientai/_utils/_reflection.py diff --git a/src/gradientai/_utils/_resources_proxy.py b/src/gradientai/_utils/_resources_proxy.py new file mode 100644 index 00000000..b3bc4931 --- /dev/null +++ b/src/gradientai/_utils/_resources_proxy.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from typing import Any +from typing_extensions import override + +from ._proxy import LazyProxy + + +class ResourcesProxy(LazyProxy[Any]): + """A proxy for the `gradientai.resources` module. + + This is used so that we can lazily import `gradientai.resources` only when + needed *and* so that users can just import `gradientai` and reference `gradientai.resources` + """ + + @override + def __load__(self) -> Any: + import importlib + + mod = importlib.import_module("gradientai.resources") + return mod + + +resources = ResourcesProxy().__as_proxied__() diff --git a/src/digitalocean_genai_sdk/_utils/_streams.py b/src/gradientai/_utils/_streams.py similarity index 100% rename from src/digitalocean_genai_sdk/_utils/_streams.py rename to src/gradientai/_utils/_streams.py diff --git a/src/digitalocean_genai_sdk/_utils/_sync.py b/src/gradientai/_utils/_sync.py similarity index 100% rename from src/digitalocean_genai_sdk/_utils/_sync.py rename to src/gradientai/_utils/_sync.py diff --git a/src/digitalocean_genai_sdk/_utils/_transform.py b/src/gradientai/_utils/_transform.py similarity index 100% rename from src/digitalocean_genai_sdk/_utils/_transform.py rename to src/gradientai/_utils/_transform.py diff --git a/src/digitalocean_genai_sdk/_utils/_typing.py b/src/gradientai/_utils/_typing.py similarity index 100% rename from src/digitalocean_genai_sdk/_utils/_typing.py rename to src/gradientai/_utils/_typing.py diff --git a/src/digitalocean_genai_sdk/_utils/_utils.py b/src/gradientai/_utils/_utils.py similarity index 100% rename from src/digitalocean_genai_sdk/_utils/_utils.py rename to src/gradientai/_utils/_utils.py diff --git a/src/gradientai/_version.py b/src/gradientai/_version.py new file mode 100644 index 00000000..4d3df522 --- /dev/null +++ b/src/gradientai/_version.py @@ -0,0 +1,4 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +__title__ = "gradientai" +__version__ = "0.1.0-alpha.4" # x-release-please-version diff --git a/src/gradientai/lib/.keep b/src/gradientai/lib/.keep new file mode 100644 index 00000000..5e2c99fd --- /dev/null +++ b/src/gradientai/lib/.keep @@ -0,0 +1,4 @@ +File generated from our OpenAPI spec by Stainless. + +This directory can be used to store custom files to expand the SDK. 
+It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. \ No newline at end of file diff --git a/src/digitalocean_genai_sdk/py.typed b/src/gradientai/py.typed similarity index 100% rename from src/digitalocean_genai_sdk/py.typed rename to src/gradientai/py.typed diff --git a/src/digitalocean_genai_sdk/resources/__init__.py b/src/gradientai/resources/__init__.py similarity index 71% rename from src/digitalocean_genai_sdk/resources/__init__.py rename to src/gradientai/resources/__init__.py index 6dcbff02..1763a13e 100644 --- a/src/digitalocean_genai_sdk/resources/__init__.py +++ b/src/gradientai/resources/__init__.py @@ -1,13 +1,5 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. -from .auth import ( - AuthResource, - AsyncAuthResource, - AuthResourceWithRawResponse, - AsyncAuthResourceWithRawResponse, - AuthResourceWithStreamingResponse, - AsyncAuthResourceWithStreamingResponse, -) from .chat import ( ChatResource, AsyncChatResource, @@ -40,13 +32,13 @@ RegionsResourceWithStreamingResponse, AsyncRegionsResourceWithStreamingResponse, ) -from .api_keys import ( - APIKeysResource, - AsyncAPIKeysResource, - APIKeysResourceWithRawResponse, - AsyncAPIKeysResourceWithRawResponse, - APIKeysResourceWithStreamingResponse, - AsyncAPIKeysResourceWithStreamingResponse, +from .inference import ( + InferenceResource, + AsyncInferenceResource, + InferenceResourceWithRawResponse, + AsyncInferenceResourceWithRawResponse, + InferenceResourceWithStreamingResponse, + AsyncInferenceResourceWithStreamingResponse, ) from .providers import ( ProvidersResource, @@ -56,14 +48,6 @@ ProvidersResourceWithStreamingResponse, AsyncProvidersResourceWithStreamingResponse, ) -from .embeddings import ( - EmbeddingsResource, - AsyncEmbeddingsResource, - EmbeddingsResourceWithRawResponse, - AsyncEmbeddingsResourceWithRawResponse, - EmbeddingsResourceWithStreamingResponse, - AsyncEmbeddingsResourceWithStreamingResponse, -) from .indexing_jobs import ( IndexingJobsResource, AsyncIndexingJobsResource, @@ -94,12 +78,6 @@ "AsyncProvidersResourceWithRawResponse", "ProvidersResourceWithStreamingResponse", "AsyncProvidersResourceWithStreamingResponse", - "AuthResource", - "AsyncAuthResource", - "AuthResourceWithRawResponse", - "AsyncAuthResourceWithRawResponse", - "AuthResourceWithStreamingResponse", - "AsyncAuthResourceWithStreamingResponse", "RegionsResource", "AsyncRegionsResource", "RegionsResourceWithRawResponse", @@ -118,24 +96,18 @@ "AsyncKnowledgeBasesResourceWithRawResponse", "KnowledgeBasesResourceWithStreamingResponse", "AsyncKnowledgeBasesResourceWithStreamingResponse", - "APIKeysResource", - "AsyncAPIKeysResource", - "APIKeysResourceWithRawResponse", - "AsyncAPIKeysResourceWithRawResponse", - "APIKeysResourceWithStreamingResponse", - "AsyncAPIKeysResourceWithStreamingResponse", "ChatResource", "AsyncChatResource", "ChatResourceWithRawResponse", "AsyncChatResourceWithRawResponse", "ChatResourceWithStreamingResponse", "AsyncChatResourceWithStreamingResponse", - "EmbeddingsResource", - "AsyncEmbeddingsResource", - "EmbeddingsResourceWithRawResponse", - "AsyncEmbeddingsResourceWithRawResponse", - "EmbeddingsResourceWithStreamingResponse", - "AsyncEmbeddingsResourceWithStreamingResponse", + "InferenceResource", + "AsyncInferenceResource", + "InferenceResourceWithRawResponse", + "AsyncInferenceResourceWithRawResponse", + "InferenceResourceWithStreamingResponse", + "AsyncInferenceResourceWithStreamingResponse", "ModelsResource", 
"AsyncModelsResource", "ModelsResourceWithRawResponse", diff --git a/src/digitalocean_genai_sdk/resources/agents/__init__.py b/src/gradientai/resources/agents/__init__.py similarity index 100% rename from src/digitalocean_genai_sdk/resources/agents/__init__.py rename to src/gradientai/resources/agents/__init__.py diff --git a/src/digitalocean_genai_sdk/resources/agents/agents.py b/src/gradientai/resources/agents/agents.py similarity index 94% rename from src/digitalocean_genai_sdk/resources/agents/agents.py rename to src/gradientai/resources/agents/agents.py index 6d3ce525..63f0c4d4 100644 --- a/src/digitalocean_genai_sdk/resources/agents/agents.py +++ b/src/gradientai/resources/agents/agents.py @@ -104,7 +104,7 @@ def with_raw_response(self) -> AgentsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return AgentsResourceWithRawResponse(self) @@ -113,7 +113,7 @@ def with_streaming_response(self) -> AgentsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return AgentsResourceWithStreamingResponse(self) @@ -159,7 +159,9 @@ def create( timeout: Override the client-level default timeout for this request, in seconds """ return self._post( - "/v2/genai/agents", + "/v2/gen-ai/agents" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/agents", body=maybe_transform( { "anthropic_key_uuid": anthropic_key_uuid, @@ -209,7 +211,9 @@ def retrieve( if not uuid: raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") return self._get( - f"/v2/genai/agents/{uuid}", + f"/v2/gen-ai/agents/{uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -278,7 +282,9 @@ def update( if not path_uuid: raise ValueError(f"Expected a non-empty value for `path_uuid` but received {path_uuid!r}") return self._put( - f"/v2/genai/agents/{path_uuid}", + f"/v2/gen-ai/agents/{path_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{path_uuid}", body=maybe_transform( { "anthropic_key_uuid": anthropic_key_uuid, @@ -337,7 +343,9 @@ def list( timeout: Override the client-level default timeout for this request, in seconds """ return self._get( - "/v2/genai/agents", + "/v2/gen-ai/agents" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/agents", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -381,7 +389,9 @@ def delete( if not uuid: raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") return self._delete( - f"/v2/genai/agents/{uuid}", + f"/v2/gen-ai/agents/{uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{uuid}", options=make_request_options( extra_headers=extra_headers, 
extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -418,7 +428,9 @@ def update_status( if not path_uuid: raise ValueError(f"Expected a non-empty value for `path_uuid` but received {path_uuid!r}") return self._put( - f"/v2/genai/agents/{path_uuid}/deployment_visibility", + f"/v2/gen-ai/agents/{path_uuid}/deployment_visibility" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{path_uuid}/deployment_visibility", body=maybe_transform( { "body_uuid": body_uuid, @@ -460,7 +472,7 @@ def with_raw_response(self) -> AsyncAgentsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return AsyncAgentsResourceWithRawResponse(self) @@ -469,7 +481,7 @@ def with_streaming_response(self) -> AsyncAgentsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return AsyncAgentsResourceWithStreamingResponse(self) @@ -515,7 +527,9 @@ async def create( timeout: Override the client-level default timeout for this request, in seconds """ return await self._post( - "/v2/genai/agents", + "/v2/gen-ai/agents" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/agents", body=await async_maybe_transform( { "anthropic_key_uuid": anthropic_key_uuid, @@ -565,7 +579,9 @@ async def retrieve( if not uuid: raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") return await self._get( - f"/v2/genai/agents/{uuid}", + f"/v2/gen-ai/agents/{uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -634,7 +650,9 @@ async def update( if not path_uuid: raise ValueError(f"Expected a non-empty value for `path_uuid` but received {path_uuid!r}") return await self._put( - f"/v2/genai/agents/{path_uuid}", + f"/v2/gen-ai/agents/{path_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{path_uuid}", body=await async_maybe_transform( { "anthropic_key_uuid": anthropic_key_uuid, @@ -693,7 +711,9 @@ async def list( timeout: Override the client-level default timeout for this request, in seconds """ return await self._get( - "/v2/genai/agents", + "/v2/gen-ai/agents" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/agents", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -737,7 +757,9 @@ async def delete( if not uuid: raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") return await self._delete( - f"/v2/genai/agents/{uuid}", + f"/v2/gen-ai/agents/{uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -774,7 +796,9 @@ 
async def update_status( if not path_uuid: raise ValueError(f"Expected a non-empty value for `path_uuid` but received {path_uuid!r}") return await self._put( - f"/v2/genai/agents/{path_uuid}/deployment_visibility", + f"/v2/gen-ai/agents/{path_uuid}/deployment_visibility" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{path_uuid}/deployment_visibility", body=await async_maybe_transform( { "body_uuid": body_uuid, diff --git a/src/digitalocean_genai_sdk/resources/agents/api_keys.py b/src/gradientai/resources/agents/api_keys.py similarity index 90% rename from src/digitalocean_genai_sdk/resources/agents/api_keys.py rename to src/gradientai/resources/agents/api_keys.py index 451f5cb5..1cf2278e 100644 --- a/src/digitalocean_genai_sdk/resources/agents/api_keys.py +++ b/src/gradientai/resources/agents/api_keys.py @@ -32,7 +32,7 @@ def with_raw_response(self) -> APIKeysResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return APIKeysResourceWithRawResponse(self) @@ -41,7 +41,7 @@ def with_streaming_response(self) -> APIKeysResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return APIKeysResourceWithStreamingResponse(self) @@ -74,7 +74,9 @@ def create( if not path_agent_uuid: raise ValueError(f"Expected a non-empty value for `path_agent_uuid` but received {path_agent_uuid!r}") return self._post( - f"/v2/genai/agents/{path_agent_uuid}/api_keys", + f"/v2/gen-ai/agents/{path_agent_uuid}/api_keys" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{path_agent_uuid}/api_keys", body=maybe_transform( { "body_agent_uuid": body_agent_uuid, @@ -121,7 +123,9 @@ def update( if not path_api_key_uuid: raise ValueError(f"Expected a non-empty value for `path_api_key_uuid` but received {path_api_key_uuid!r}") return self._put( - f"/v2/genai/agents/{path_agent_uuid}/api_keys/{path_api_key_uuid}", + f"/v2/gen-ai/agents/{path_agent_uuid}/api_keys/{path_api_key_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{path_agent_uuid}/api_keys/{path_api_key_uuid}", body=maybe_transform( { "body_agent_uuid": body_agent_uuid, @@ -169,7 +173,9 @@ def list( if not agent_uuid: raise ValueError(f"Expected a non-empty value for `agent_uuid` but received {agent_uuid!r}") return self._get( - f"/v2/genai/agents/{agent_uuid}/api_keys", + f"/v2/gen-ai/agents/{agent_uuid}/api_keys" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{agent_uuid}/api_keys", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -216,7 +222,9 @@ def delete( if not api_key_uuid: raise ValueError(f"Expected a non-empty value for `api_key_uuid` but received {api_key_uuid!r}") return self._delete( - f"/v2/genai/agents/{agent_uuid}/api_keys/{api_key_uuid}", + f"/v2/gen-ai/agents/{agent_uuid}/api_keys/{api_key_uuid}" + if 
self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{agent_uuid}/api_keys/{api_key_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -253,7 +261,9 @@ def regenerate( if not api_key_uuid: raise ValueError(f"Expected a non-empty value for `api_key_uuid` but received {api_key_uuid!r}") return self._put( - f"/v2/genai/agents/{agent_uuid}/api_keys/{api_key_uuid}/regenerate", + f"/v2/gen-ai/agents/{agent_uuid}/api_keys/{api_key_uuid}/regenerate" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{agent_uuid}/api_keys/{api_key_uuid}/regenerate", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -268,7 +278,7 @@ def with_raw_response(self) -> AsyncAPIKeysResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return AsyncAPIKeysResourceWithRawResponse(self) @@ -277,7 +287,7 @@ def with_streaming_response(self) -> AsyncAPIKeysResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return AsyncAPIKeysResourceWithStreamingResponse(self) @@ -310,7 +320,9 @@ async def create( if not path_agent_uuid: raise ValueError(f"Expected a non-empty value for `path_agent_uuid` but received {path_agent_uuid!r}") return await self._post( - f"/v2/genai/agents/{path_agent_uuid}/api_keys", + f"/v2/gen-ai/agents/{path_agent_uuid}/api_keys" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{path_agent_uuid}/api_keys", body=await async_maybe_transform( { "body_agent_uuid": body_agent_uuid, @@ -357,7 +369,9 @@ async def update( if not path_api_key_uuid: raise ValueError(f"Expected a non-empty value for `path_api_key_uuid` but received {path_api_key_uuid!r}") return await self._put( - f"/v2/genai/agents/{path_agent_uuid}/api_keys/{path_api_key_uuid}", + f"/v2/gen-ai/agents/{path_agent_uuid}/api_keys/{path_api_key_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{path_agent_uuid}/api_keys/{path_api_key_uuid}", body=await async_maybe_transform( { "body_agent_uuid": body_agent_uuid, @@ -405,7 +419,9 @@ async def list( if not agent_uuid: raise ValueError(f"Expected a non-empty value for `agent_uuid` but received {agent_uuid!r}") return await self._get( - f"/v2/genai/agents/{agent_uuid}/api_keys", + f"/v2/gen-ai/agents/{agent_uuid}/api_keys" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{agent_uuid}/api_keys", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -452,7 +468,9 @@ async def delete( if not api_key_uuid: raise ValueError(f"Expected a non-empty value for `api_key_uuid` but received {api_key_uuid!r}") return await self._delete( - f"/v2/genai/agents/{agent_uuid}/api_keys/{api_key_uuid}", + 
f"/v2/gen-ai/agents/{agent_uuid}/api_keys/{api_key_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{agent_uuid}/api_keys/{api_key_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -489,7 +507,9 @@ async def regenerate( if not api_key_uuid: raise ValueError(f"Expected a non-empty value for `api_key_uuid` but received {api_key_uuid!r}") return await self._put( - f"/v2/genai/agents/{agent_uuid}/api_keys/{api_key_uuid}/regenerate", + f"/v2/gen-ai/agents/{agent_uuid}/api_keys/{api_key_uuid}/regenerate" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{agent_uuid}/api_keys/{api_key_uuid}/regenerate", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), diff --git a/src/digitalocean_genai_sdk/resources/agents/child_agents.py b/src/gradientai/resources/agents/child_agents.py similarity index 90% rename from src/digitalocean_genai_sdk/resources/agents/child_agents.py rename to src/gradientai/resources/agents/child_agents.py index 7d4ed3bb..ad30f106 100644 --- a/src/digitalocean_genai_sdk/resources/agents/child_agents.py +++ b/src/gradientai/resources/agents/child_agents.py @@ -31,7 +31,7 @@ def with_raw_response(self) -> ChildAgentsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return ChildAgentsResourceWithRawResponse(self) @@ -40,7 +40,7 @@ def with_streaming_response(self) -> ChildAgentsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return ChildAgentsResourceWithStreamingResponse(self) @@ -85,7 +85,9 @@ def update( f"Expected a non-empty value for `path_child_agent_uuid` but received {path_child_agent_uuid!r}" ) return self._put( - f"/v2/genai/agents/{path_parent_agent_uuid}/child_agents/{path_child_agent_uuid}", + f"/v2/gen-ai/agents/{path_parent_agent_uuid}/child_agents/{path_child_agent_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{path_parent_agent_uuid}/child_agents/{path_child_agent_uuid}", body=maybe_transform( { "body_child_agent_uuid": body_child_agent_uuid, @@ -132,7 +134,9 @@ def delete( if not child_agent_uuid: raise ValueError(f"Expected a non-empty value for `child_agent_uuid` but received {child_agent_uuid!r}") return self._delete( - f"/v2/genai/agents/{parent_agent_uuid}/child_agents/{child_agent_uuid}", + f"/v2/gen-ai/agents/{parent_agent_uuid}/child_agents/{child_agent_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{parent_agent_uuid}/child_agents/{child_agent_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -179,7 +183,9 @@ def add( f"Expected a non-empty value for `path_child_agent_uuid` but received {path_child_agent_uuid!r}" ) return self._post( - f"/v2/genai/agents/{path_parent_agent_uuid}/child_agents/{path_child_agent_uuid}", + f"/v2/gen-ai/agents/{path_parent_agent_uuid}/child_agents/{path_child_agent_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{path_parent_agent_uuid}/child_agents/{path_child_agent_uuid}", body=maybe_transform( { "body_child_agent_uuid": body_child_agent_uuid, @@ -222,7 +228,9 @@ def view( if not uuid: raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") return self._get( - f"/v2/genai/agents/{uuid}/child_agents", + f"/v2/gen-ai/agents/{uuid}/child_agents" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{uuid}/child_agents", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -237,7 +245,7 @@ def with_raw_response(self) -> AsyncChildAgentsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return AsyncChildAgentsResourceWithRawResponse(self) @@ -246,7 +254,7 @@ def with_streaming_response(self) -> AsyncChildAgentsResourceWithStreamingRespon """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return AsyncChildAgentsResourceWithStreamingResponse(self) @@ -291,7 +299,9 @@ async def update( f"Expected a non-empty value for `path_child_agent_uuid` but received {path_child_agent_uuid!r}" ) return await self._put( - f"/v2/genai/agents/{path_parent_agent_uuid}/child_agents/{path_child_agent_uuid}", + f"/v2/gen-ai/agents/{path_parent_agent_uuid}/child_agents/{path_child_agent_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{path_parent_agent_uuid}/child_agents/{path_child_agent_uuid}", body=await async_maybe_transform( { "body_child_agent_uuid": body_child_agent_uuid, @@ -338,7 +348,9 @@ async def delete( if not child_agent_uuid: raise ValueError(f"Expected a non-empty value for `child_agent_uuid` but received {child_agent_uuid!r}") return await self._delete( - f"/v2/genai/agents/{parent_agent_uuid}/child_agents/{child_agent_uuid}", + f"/v2/gen-ai/agents/{parent_agent_uuid}/child_agents/{child_agent_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{parent_agent_uuid}/child_agents/{child_agent_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -385,7 +397,9 @@ async def add( f"Expected a non-empty value for `path_child_agent_uuid` but received {path_child_agent_uuid!r}" ) return await self._post( - f"/v2/genai/agents/{path_parent_agent_uuid}/child_agents/{path_child_agent_uuid}", + f"/v2/gen-ai/agents/{path_parent_agent_uuid}/child_agents/{path_child_agent_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{path_parent_agent_uuid}/child_agents/{path_child_agent_uuid}", body=await async_maybe_transform( { "body_child_agent_uuid": body_child_agent_uuid, @@ -428,7 +442,9 @@ async def view( if not uuid: raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") return await self._get( - f"/v2/genai/agents/{uuid}/child_agents", + f"/v2/gen-ai/agents/{uuid}/child_agents" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{uuid}/child_agents", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), diff --git a/src/digitalocean_genai_sdk/resources/agents/functions.py b/src/gradientai/resources/agents/functions.py similarity index 91% rename from src/digitalocean_genai_sdk/resources/agents/functions.py rename to src/gradientai/resources/agents/functions.py index 89f9efa3..8c5f3f49 100644 --- a/src/digitalocean_genai_sdk/resources/agents/functions.py +++ b/src/gradientai/resources/agents/functions.py @@ -30,7 +30,7 @@ def with_raw_response(self) -> FunctionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return FunctionsResourceWithRawResponse(self) @@ -39,7 +39,7 @@ def with_streaming_response(self) -> FunctionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return FunctionsResourceWithStreamingResponse(self) @@ -77,7 +77,9 @@ def create( if not path_agent_uuid: raise ValueError(f"Expected a non-empty value for `path_agent_uuid` but received {path_agent_uuid!r}") return self._post( - f"/v2/genai/agents/{path_agent_uuid}/functions", + f"/v2/gen-ai/agents/{path_agent_uuid}/functions" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{path_agent_uuid}/functions", body=maybe_transform( { "body_agent_uuid": body_agent_uuid, @@ -134,7 +136,9 @@ def update( if not path_function_uuid: raise ValueError(f"Expected a non-empty value for `path_function_uuid` but received {path_function_uuid!r}") return self._put( - f"/v2/genai/agents/{path_agent_uuid}/functions/{path_function_uuid}", + f"/v2/gen-ai/agents/{path_agent_uuid}/functions/{path_function_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{path_agent_uuid}/functions/{path_function_uuid}", body=maybe_transform( { "body_agent_uuid": body_agent_uuid, @@ -184,7 +188,9 @@ def delete( if not function_uuid: raise ValueError(f"Expected a non-empty value for `function_uuid` but received {function_uuid!r}") return self._delete( - f"/v2/genai/agents/{agent_uuid}/functions/{function_uuid}", + f"/v2/gen-ai/agents/{agent_uuid}/functions/{function_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{agent_uuid}/functions/{function_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -199,7 +205,7 @@ def with_raw_response(self) -> AsyncFunctionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return AsyncFunctionsResourceWithRawResponse(self) @@ -208,7 +214,7 @@ def with_streaming_response(self) -> AsyncFunctionsResourceWithStreamingResponse """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return AsyncFunctionsResourceWithStreamingResponse(self) @@ -246,7 +252,9 @@ async def create( if not path_agent_uuid: raise ValueError(f"Expected a non-empty value for `path_agent_uuid` but received {path_agent_uuid!r}") return await self._post( - f"/v2/genai/agents/{path_agent_uuid}/functions", + f"/v2/gen-ai/agents/{path_agent_uuid}/functions" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{path_agent_uuid}/functions", body=await async_maybe_transform( { "body_agent_uuid": body_agent_uuid, @@ -303,7 +311,9 @@ async def update( if not path_function_uuid: raise ValueError(f"Expected a non-empty value for `path_function_uuid` but received {path_function_uuid!r}") return await self._put( - f"/v2/genai/agents/{path_agent_uuid}/functions/{path_function_uuid}", + f"/v2/gen-ai/agents/{path_agent_uuid}/functions/{path_function_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{path_agent_uuid}/functions/{path_function_uuid}", body=await async_maybe_transform( { "body_agent_uuid": body_agent_uuid, @@ -353,7 +363,9 @@ async def delete( if not function_uuid: raise ValueError(f"Expected a non-empty value for `function_uuid` but received {function_uuid!r}") return await self._delete( - f"/v2/genai/agents/{agent_uuid}/functions/{function_uuid}", + f"/v2/gen-ai/agents/{agent_uuid}/functions/{function_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{agent_uuid}/functions/{function_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), diff --git a/src/digitalocean_genai_sdk/resources/agents/knowledge_bases.py b/src/gradientai/resources/agents/knowledge_bases.py similarity index 89% rename from src/digitalocean_genai_sdk/resources/agents/knowledge_bases.py rename to src/gradientai/resources/agents/knowledge_bases.py index 4a091446..a5486c34 100644 --- a/src/digitalocean_genai_sdk/resources/agents/knowledge_bases.py +++ b/src/gradientai/resources/agents/knowledge_bases.py @@ -27,7 +27,7 @@ def with_raw_response(self) -> KnowledgeBasesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return KnowledgeBasesResourceWithRawResponse(self) @@ -36,7 +36,7 @@ def with_streaming_response(self) -> KnowledgeBasesResourceWithStreamingResponse """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return KnowledgeBasesResourceWithStreamingResponse(self) @@ -67,7 +67,9 @@ def attach( if not agent_uuid: raise ValueError(f"Expected a non-empty value for `agent_uuid` but received {agent_uuid!r}") return self._post( - f"/v2/genai/agents/{agent_uuid}/knowledge_bases", + f"/v2/gen-ai/agents/{agent_uuid}/knowledge_bases" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{agent_uuid}/knowledge_bases", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -106,7 +108,9 @@ def attach_single( f"Expected a non-empty value for `knowledge_base_uuid` but received {knowledge_base_uuid!r}" ) return self._post( - f"/v2/genai/agents/{agent_uuid}/knowledge_bases/{knowledge_base_uuid}", + f"/v2/gen-ai/agents/{agent_uuid}/knowledge_bases/{knowledge_base_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{agent_uuid}/knowledge_bases/{knowledge_base_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -145,7 +149,9 @@ def detach( f"Expected a non-empty value for `knowledge_base_uuid` but received {knowledge_base_uuid!r}" ) return self._delete( - f"/v2/genai/agents/{agent_uuid}/knowledge_bases/{knowledge_base_uuid}", + f"/v2/gen-ai/agents/{agent_uuid}/knowledge_bases/{knowledge_base_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{agent_uuid}/knowledge_bases/{knowledge_base_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -160,7 +166,7 @@ def with_raw_response(self) -> AsyncKnowledgeBasesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return AsyncKnowledgeBasesResourceWithRawResponse(self) @@ -169,7 +175,7 @@ def with_streaming_response(self) -> AsyncKnowledgeBasesResourceWithStreamingRes """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return AsyncKnowledgeBasesResourceWithStreamingResponse(self) @@ -200,7 +206,9 @@ async def attach( if not agent_uuid: raise ValueError(f"Expected a non-empty value for `agent_uuid` but received {agent_uuid!r}") return await self._post( - f"/v2/genai/agents/{agent_uuid}/knowledge_bases", + f"/v2/gen-ai/agents/{agent_uuid}/knowledge_bases" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{agent_uuid}/knowledge_bases", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -239,7 +247,9 @@ async def attach_single( f"Expected a non-empty value for `knowledge_base_uuid` but received {knowledge_base_uuid!r}" ) return await self._post( - f"/v2/genai/agents/{agent_uuid}/knowledge_bases/{knowledge_base_uuid}", + f"/v2/gen-ai/agents/{agent_uuid}/knowledge_bases/{knowledge_base_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{agent_uuid}/knowledge_bases/{knowledge_base_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -278,7 +288,9 @@ async def detach( f"Expected a non-empty value for `knowledge_base_uuid` but received {knowledge_base_uuid!r}" ) return await self._delete( - f"/v2/genai/agents/{agent_uuid}/knowledge_bases/{knowledge_base_uuid}", + f"/v2/gen-ai/agents/{agent_uuid}/knowledge_bases/{knowledge_base_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{agent_uuid}/knowledge_bases/{knowledge_base_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), diff --git a/src/digitalocean_genai_sdk/resources/agents/versions.py b/src/gradientai/resources/agents/versions.py similarity index 91% rename from src/digitalocean_genai_sdk/resources/agents/versions.py rename to src/gradientai/resources/agents/versions.py index e77a252b..65a35472 100644 --- a/src/digitalocean_genai_sdk/resources/agents/versions.py +++ b/src/gradientai/resources/agents/versions.py @@ -29,7 +29,7 @@ def with_raw_response(self) -> VersionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return VersionsResourceWithRawResponse(self) @@ -38,7 +38,7 @@ def with_streaming_response(self) -> VersionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return VersionsResourceWithStreamingResponse(self) @@ -71,7 +71,9 @@ def update( if not path_uuid: raise ValueError(f"Expected a non-empty value for `path_uuid` but received {path_uuid!r}") return self._put( - f"/v2/gen-ai/agents/{path_uuid}/versions", + f"/v2/gen-ai/agents/{path_uuid}/versions" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{path_uuid}/versions", body=maybe_transform( { "body_uuid": body_uuid, @@ -118,7 +120,9 @@ def list( if not uuid: raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") return self._get( - f"/v2/gen-ai/agents/{uuid}/versions", + f"/v2/gen-ai/agents/{uuid}/versions" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{uuid}/versions", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -143,7 +147,7 @@ def with_raw_response(self) -> AsyncVersionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return AsyncVersionsResourceWithRawResponse(self) @@ -152,7 +156,7 @@ def with_streaming_response(self) -> AsyncVersionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return AsyncVersionsResourceWithStreamingResponse(self) @@ -185,7 +189,9 @@ async def update( if not path_uuid: raise ValueError(f"Expected a non-empty value for `path_uuid` but received {path_uuid!r}") return await self._put( - f"/v2/gen-ai/agents/{path_uuid}/versions", + f"/v2/gen-ai/agents/{path_uuid}/versions" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{path_uuid}/versions", body=await async_maybe_transform( { "body_uuid": body_uuid, @@ -232,7 +238,9 @@ async def list( if not uuid: raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") return await self._get( - f"/v2/gen-ai/agents/{uuid}/versions", + f"/v2/gen-ai/agents/{uuid}/versions" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/agents/{uuid}/versions", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, diff --git a/src/gradientai/resources/chat/__init__.py b/src/gradientai/resources/chat/__init__.py new file mode 100644 index 00000000..ec960eb4 --- /dev/null +++ b/src/gradientai/resources/chat/__init__.py @@ -0,0 +1,33 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from .chat import ( + ChatResource, + AsyncChatResource, + ChatResourceWithRawResponse, + AsyncChatResourceWithRawResponse, + ChatResourceWithStreamingResponse, + AsyncChatResourceWithStreamingResponse, +) +from .completions import ( + CompletionsResource, + AsyncCompletionsResource, + CompletionsResourceWithRawResponse, + AsyncCompletionsResourceWithRawResponse, + CompletionsResourceWithStreamingResponse, + AsyncCompletionsResourceWithStreamingResponse, +) + +__all__ = [ + "CompletionsResource", + "AsyncCompletionsResource", + "CompletionsResourceWithRawResponse", + "AsyncCompletionsResourceWithRawResponse", + "CompletionsResourceWithStreamingResponse", + "AsyncCompletionsResourceWithStreamingResponse", + "ChatResource", + "AsyncChatResource", + "ChatResourceWithRawResponse", + "AsyncChatResourceWithRawResponse", + "ChatResourceWithStreamingResponse", + "AsyncChatResourceWithStreamingResponse", +] diff --git a/src/gradientai/resources/chat/chat.py b/src/gradientai/resources/chat/chat.py new file mode 100644 index 00000000..6fa2925d --- /dev/null +++ b/src/gradientai/resources/chat/chat.py @@ -0,0 +1,102 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from .completions import ( + CompletionsResource, + AsyncCompletionsResource, + CompletionsResourceWithRawResponse, + AsyncCompletionsResourceWithRawResponse, + CompletionsResourceWithStreamingResponse, + AsyncCompletionsResourceWithStreamingResponse, +) + +__all__ = ["ChatResource", "AsyncChatResource"] + + +class ChatResource(SyncAPIResource): + @cached_property + def completions(self) -> CompletionsResource: + return CompletionsResource(self._client) + + @cached_property + def with_raw_response(self) -> ChatResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + """ + return ChatResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> ChatResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + """ + return ChatResourceWithStreamingResponse(self) + + +class AsyncChatResource(AsyncAPIResource): + @cached_property + def completions(self) -> AsyncCompletionsResource: + return AsyncCompletionsResource(self._client) + + @cached_property + def with_raw_response(self) -> AsyncChatResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + """ + return AsyncChatResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncChatResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + """ + return AsyncChatResourceWithStreamingResponse(self) + + +class ChatResourceWithRawResponse: + def __init__(self, chat: ChatResource) -> None: + self._chat = chat + + @cached_property + def completions(self) -> CompletionsResourceWithRawResponse: + return CompletionsResourceWithRawResponse(self._chat.completions) + + +class AsyncChatResourceWithRawResponse: + def __init__(self, chat: AsyncChatResource) -> None: + self._chat = chat + + @cached_property + def completions(self) -> AsyncCompletionsResourceWithRawResponse: + return AsyncCompletionsResourceWithRawResponse(self._chat.completions) + + +class ChatResourceWithStreamingResponse: + def __init__(self, chat: ChatResource) -> None: + self._chat = chat + + @cached_property + def completions(self) -> CompletionsResourceWithStreamingResponse: + return CompletionsResourceWithStreamingResponse(self._chat.completions) + + +class AsyncChatResourceWithStreamingResponse: + def __init__(self, chat: AsyncChatResource) -> None: + self._chat = chat + + @cached_property + def completions(self) -> AsyncCompletionsResourceWithStreamingResponse: + return AsyncCompletionsResourceWithStreamingResponse(self._chat.completions) diff --git a/src/digitalocean_genai_sdk/resources/chat.py b/src/gradientai/resources/chat/completions.py similarity index 81% rename from src/digitalocean_genai_sdk/resources/chat.py rename to src/gradientai/resources/chat/completions.py index 518fbad8..2d7c94c3 100644 --- a/src/digitalocean_genai_sdk/resources/chat.py +++ b/src/gradientai/resources/chat/completions.py @@ -6,47 +6,47 @@ import httpx -from ..types import chat_create_completion_params -from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven -from .._utils import maybe_transform, async_maybe_transform -from .._compat import cached_property -from .._resource import SyncAPIResource, AsyncAPIResource -from .._response import ( +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( to_raw_response_wrapper, to_streamed_response_wrapper, async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from .._base_client import make_request_options -from ..types.chat_create_completion_response import ChatCreateCompletionResponse +from ...types.chat import completion_create_params +from ..._base_client import make_request_options +from ...types.chat.completion_create_response import CompletionCreateResponse -__all__ = ["ChatResource", "AsyncChatResource"] +__all__ = ["CompletionsResource", "AsyncCompletionsResource"] -class ChatResource(SyncAPIResource): +class CompletionsResource(SyncAPIResource): @cached_property - def with_raw_response(self) -> ChatResourceWithRawResponse: + def with_raw_response(self) -> CompletionsResourceWithRawResponse: """ This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ - return ChatResourceWithRawResponse(self) + return CompletionsResourceWithRawResponse(self) @cached_property - def with_streaming_response(self) -> ChatResourceWithStreamingResponse: + def with_streaming_response(self) -> CompletionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ - return ChatResourceWithStreamingResponse(self) + return CompletionsResourceWithStreamingResponse(self) - def create_completion( + def create( self, *, - messages: Iterable[chat_create_completion_params.Message], + messages: Iterable[completion_create_params.Message], model: str, frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, @@ -58,7 +58,7 @@ def create_completion( presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, stream: Optional[bool] | NotGiven = NOT_GIVEN, - stream_options: Optional[chat_create_completion_params.StreamOptions] | NotGiven = NOT_GIVEN, + stream_options: Optional[completion_create_params.StreamOptions] | NotGiven = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, top_logprobs: Optional[int] | NotGiven = NOT_GIVEN, top_p: Optional[float] | NotGiven = NOT_GIVEN, @@ -69,7 +69,7 @@ def create_completion( extra_query: Query | None = None, extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ChatCreateCompletionResponse: + ) -> CompletionCreateResponse: """ Creates a model response for the given chat conversation. @@ -154,7 +154,9 @@ def create_completion( timeout: Override the client-level default timeout for this request, in seconds """ return self._post( - "/chat/completions", + "/chat/completions" + if self._client._base_url_overridden + else "https://inference.do-ai.run/v1/chat/completions", body=maybe_transform( { "messages": messages, @@ -175,39 +177,39 @@ def create_completion( "top_p": top_p, "user": user, }, - chat_create_completion_params.ChatCreateCompletionParams, + completion_create_params.CompletionCreateParams, ), options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), - cast_to=ChatCreateCompletionResponse, + cast_to=CompletionCreateResponse, ) -class AsyncChatResource(AsyncAPIResource): +class AsyncCompletionsResource(AsyncAPIResource): @cached_property - def with_raw_response(self) -> AsyncChatResourceWithRawResponse: + def with_raw_response(self) -> AsyncCompletionsResourceWithRawResponse: """ This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ - return AsyncChatResourceWithRawResponse(self) + return AsyncCompletionsResourceWithRawResponse(self) @cached_property - def with_streaming_response(self) -> AsyncChatResourceWithStreamingResponse: + def with_streaming_response(self) -> AsyncCompletionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ - return AsyncChatResourceWithStreamingResponse(self) + return AsyncCompletionsResourceWithStreamingResponse(self) - async def create_completion( + async def create( self, *, - messages: Iterable[chat_create_completion_params.Message], + messages: Iterable[completion_create_params.Message], model: str, frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, @@ -219,7 +221,7 @@ async def create_completion( presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, stream: Optional[bool] | NotGiven = NOT_GIVEN, - stream_options: Optional[chat_create_completion_params.StreamOptions] | NotGiven = NOT_GIVEN, + stream_options: Optional[completion_create_params.StreamOptions] | NotGiven = NOT_GIVEN, temperature: Optional[float] | NotGiven = NOT_GIVEN, top_logprobs: Optional[int] | NotGiven = NOT_GIVEN, top_p: Optional[float] | NotGiven = NOT_GIVEN, @@ -230,7 +232,7 @@ async def create_completion( extra_query: Query | None = None, extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ChatCreateCompletionResponse: + ) -> CompletionCreateResponse: """ Creates a model response for the given chat conversation. 
@@ -315,7 +317,9 @@ async def create_completion( timeout: Override the client-level default timeout for this request, in seconds """ return await self._post( - "/chat/completions", + "/chat/completions" + if self._client._base_url_overridden + else "https://inference.do-ai.run/v1/chat/completions", body=await async_maybe_transform( { "messages": messages, @@ -336,46 +340,46 @@ async def create_completion( "top_p": top_p, "user": user, }, - chat_create_completion_params.ChatCreateCompletionParams, + completion_create_params.CompletionCreateParams, ), options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), - cast_to=ChatCreateCompletionResponse, + cast_to=CompletionCreateResponse, ) -class ChatResourceWithRawResponse: - def __init__(self, chat: ChatResource) -> None: - self._chat = chat +class CompletionsResourceWithRawResponse: + def __init__(self, completions: CompletionsResource) -> None: + self._completions = completions - self.create_completion = to_raw_response_wrapper( - chat.create_completion, + self.create = to_raw_response_wrapper( + completions.create, ) -class AsyncChatResourceWithRawResponse: - def __init__(self, chat: AsyncChatResource) -> None: - self._chat = chat +class AsyncCompletionsResourceWithRawResponse: + def __init__(self, completions: AsyncCompletionsResource) -> None: + self._completions = completions - self.create_completion = async_to_raw_response_wrapper( - chat.create_completion, + self.create = async_to_raw_response_wrapper( + completions.create, ) -class ChatResourceWithStreamingResponse: - def __init__(self, chat: ChatResource) -> None: - self._chat = chat +class CompletionsResourceWithStreamingResponse: + def __init__(self, completions: CompletionsResource) -> None: + self._completions = completions - self.create_completion = to_streamed_response_wrapper( - chat.create_completion, + self.create = to_streamed_response_wrapper( + completions.create, ) -class AsyncChatResourceWithStreamingResponse: - def __init__(self, chat: AsyncChatResource) -> None: - self._chat = chat +class AsyncCompletionsResourceWithStreamingResponse: + def __init__(self, completions: AsyncCompletionsResource) -> None: + self._completions = completions - self.create_completion = async_to_streamed_response_wrapper( - chat.create_completion, + self.create = async_to_streamed_response_wrapper( + completions.create, ) diff --git a/src/digitalocean_genai_sdk/resources/indexing_jobs.py b/src/gradientai/resources/indexing_jobs.py similarity index 90% rename from src/digitalocean_genai_sdk/resources/indexing_jobs.py rename to src/gradientai/resources/indexing_jobs.py index 7649a7a7..71c59023 100644 --- a/src/digitalocean_genai_sdk/resources/indexing_jobs.py +++ b/src/gradientai/resources/indexing_jobs.py @@ -34,7 +34,7 @@ def with_raw_response(self) -> IndexingJobsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return IndexingJobsResourceWithRawResponse(self) @@ -43,7 +43,7 @@ def with_streaming_response(self) -> IndexingJobsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return IndexingJobsResourceWithStreamingResponse(self) @@ -73,7 +73,9 @@ def create( timeout: Override the client-level default timeout for this request, in seconds """ return self._post( - "/v2/genai/indexing_jobs", + "/v2/gen-ai/indexing_jobs" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/indexing_jobs", body=maybe_transform( { "data_source_uuids": data_source_uuids, @@ -114,7 +116,9 @@ def retrieve( if not uuid: raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") return self._get( - f"/v2/genai/indexing_jobs/{uuid}", + f"/v2/gen-ai/indexing_jobs/{uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/indexing_jobs/{uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -151,7 +155,9 @@ def list( timeout: Override the client-level default timeout for this request, in seconds """ return self._get( - "/v2/genai/indexing_jobs", + "/v2/gen-ai/indexing_jobs" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/indexing_jobs", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -195,7 +201,9 @@ def retrieve_data_sources( if not indexing_job_uuid: raise ValueError(f"Expected a non-empty value for `indexing_job_uuid` but received {indexing_job_uuid!r}") return self._get( - f"/v2/genai/indexing_jobs/{indexing_job_uuid}/data_sources", + f"/v2/gen-ai/indexing_jobs/{indexing_job_uuid}/data_sources" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/indexing_jobs/{indexing_job_uuid}/data_sources", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -232,7 +240,9 @@ def update_cancel( if not path_uuid: raise ValueError(f"Expected a non-empty value for `path_uuid` but received {path_uuid!r}") return self._put( - f"/v2/genai/indexing_jobs/{path_uuid}/cancel", + f"/v2/gen-ai/indexing_jobs/{path_uuid}/cancel" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/indexing_jobs/{path_uuid}/cancel", body=maybe_transform( {"body_uuid": body_uuid}, indexing_job_update_cancel_params.IndexingJobUpdateCancelParams ), @@ -250,7 +260,7 @@ def with_raw_response(self) -> AsyncIndexingJobsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return AsyncIndexingJobsResourceWithRawResponse(self) @@ -259,7 +269,7 @@ def with_streaming_response(self) -> AsyncIndexingJobsResourceWithStreamingRespo """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return AsyncIndexingJobsResourceWithStreamingResponse(self) @@ -289,7 +299,9 @@ async def create( timeout: Override the client-level default timeout for this request, in seconds """ return await self._post( - "/v2/genai/indexing_jobs", + "/v2/gen-ai/indexing_jobs" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/indexing_jobs", body=await async_maybe_transform( { "data_source_uuids": data_source_uuids, @@ -330,7 +342,9 @@ async def retrieve( if not uuid: raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") return await self._get( - f"/v2/genai/indexing_jobs/{uuid}", + f"/v2/gen-ai/indexing_jobs/{uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/indexing_jobs/{uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -367,7 +381,9 @@ async def list( timeout: Override the client-level default timeout for this request, in seconds """ return await self._get( - "/v2/genai/indexing_jobs", + "/v2/gen-ai/indexing_jobs" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/indexing_jobs", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -411,7 +427,9 @@ async def retrieve_data_sources( if not indexing_job_uuid: raise ValueError(f"Expected a non-empty value for `indexing_job_uuid` but received {indexing_job_uuid!r}") return await self._get( - f"/v2/genai/indexing_jobs/{indexing_job_uuid}/data_sources", + f"/v2/gen-ai/indexing_jobs/{indexing_job_uuid}/data_sources" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/indexing_jobs/{indexing_job_uuid}/data_sources", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -448,7 +466,9 @@ async def update_cancel( if not path_uuid: raise ValueError(f"Expected a non-empty value for `path_uuid` but received {path_uuid!r}") return await self._put( - f"/v2/genai/indexing_jobs/{path_uuid}/cancel", + f"/v2/gen-ai/indexing_jobs/{path_uuid}/cancel" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/indexing_jobs/{path_uuid}/cancel", body=await async_maybe_transform( {"body_uuid": body_uuid}, indexing_job_update_cancel_params.IndexingJobUpdateCancelParams ), diff --git a/src/gradientai/resources/inference/__init__.py b/src/gradientai/resources/inference/__init__.py new file mode 100644 index 00000000..0e5631ce --- /dev/null +++ b/src/gradientai/resources/inference/__init__.py @@ -0,0 +1,47 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from .models import ( + ModelsResource, + AsyncModelsResource, + ModelsResourceWithRawResponse, + AsyncModelsResourceWithRawResponse, + ModelsResourceWithStreamingResponse, + AsyncModelsResourceWithStreamingResponse, +) +from .api_keys import ( + APIKeysResource, + AsyncAPIKeysResource, + APIKeysResourceWithRawResponse, + AsyncAPIKeysResourceWithRawResponse, + APIKeysResourceWithStreamingResponse, + AsyncAPIKeysResourceWithStreamingResponse, +) +from .inference import ( + InferenceResource, + AsyncInferenceResource, + InferenceResourceWithRawResponse, + AsyncInferenceResourceWithRawResponse, + InferenceResourceWithStreamingResponse, + AsyncInferenceResourceWithStreamingResponse, +) + +__all__ = [ + "APIKeysResource", + "AsyncAPIKeysResource", + "APIKeysResourceWithRawResponse", + "AsyncAPIKeysResourceWithRawResponse", + "APIKeysResourceWithStreamingResponse", + "AsyncAPIKeysResourceWithStreamingResponse", + "ModelsResource", + "AsyncModelsResource", + "ModelsResourceWithRawResponse", + "AsyncModelsResourceWithRawResponse", + "ModelsResourceWithStreamingResponse", + "AsyncModelsResourceWithStreamingResponse", + "InferenceResource", + "AsyncInferenceResource", + "InferenceResourceWithRawResponse", + "AsyncInferenceResourceWithRawResponse", + "InferenceResourceWithStreamingResponse", + "AsyncInferenceResourceWithStreamingResponse", +] diff --git a/src/digitalocean_genai_sdk/resources/api_keys/api_keys_.py b/src/gradientai/resources/inference/api_keys.py similarity index 88% rename from src/digitalocean_genai_sdk/resources/api_keys/api_keys_.py rename to src/gradientai/resources/inference/api_keys.py index 70b1147a..6759d09c 100644 --- a/src/digitalocean_genai_sdk/resources/api_keys/api_keys_.py +++ b/src/gradientai/resources/inference/api_keys.py @@ -15,12 +15,12 @@ async_to_streamed_response_wrapper, ) from ..._base_client import make_request_options -from ...types.api_keys import api_key_list_params, api_key_create_params, api_key_update_params -from ...types.api_keys.api_key_list_response import APIKeyListResponse -from ...types.api_keys.api_key_create_response import APIKeyCreateResponse -from ...types.api_keys.api_key_delete_response import APIKeyDeleteResponse -from ...types.api_keys.api_key_update_response import APIKeyUpdateResponse -from ...types.api_keys.api_key_update_regenerate_response import APIKeyUpdateRegenerateResponse +from ...types.inference import api_key_list_params, api_key_create_params, api_key_update_params +from ...types.inference.api_key_list_response import APIKeyListResponse +from ...types.inference.api_key_create_response import APIKeyCreateResponse +from ...types.inference.api_key_delete_response import APIKeyDeleteResponse +from ...types.inference.api_key_update_response import APIKeyUpdateResponse +from ...types.inference.api_key_update_regenerate_response import APIKeyUpdateRegenerateResponse __all__ = ["APIKeysResource", "AsyncAPIKeysResource"] @@ -32,7 +32,7 @@ def with_raw_response(self) -> APIKeysResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return APIKeysResourceWithRawResponse(self) @@ -41,7 +41,7 @@ def with_streaming_response(self) -> APIKeysResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return APIKeysResourceWithStreamingResponse(self) @@ -69,7 +69,9 @@ def create( timeout: Override the client-level default timeout for this request, in seconds """ return self._post( - "/v2/genai/models/api_keys", + "/v2/gen-ai/models/api_keys" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/models/api_keys", body=maybe_transform({"name": name}, api_key_create_params.APIKeyCreateParams), options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout @@ -106,7 +108,9 @@ def update( if not path_api_key_uuid: raise ValueError(f"Expected a non-empty value for `path_api_key_uuid` but received {path_api_key_uuid!r}") return self._put( - f"/v2/genai/models/api_keys/{path_api_key_uuid}", + f"/v2/gen-ai/models/api_keys/{path_api_key_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/models/api_keys/{path_api_key_uuid}", body=maybe_transform( { "body_api_key_uuid": body_api_key_uuid, @@ -149,7 +153,9 @@ def list( timeout: Override the client-level default timeout for this request, in seconds """ return self._get( - "/v2/genai/models/api_keys", + "/v2/gen-ai/models/api_keys" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/models/api_keys", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -193,7 +199,9 @@ def delete( if not api_key_uuid: raise ValueError(f"Expected a non-empty value for `api_key_uuid` but received {api_key_uuid!r}") return self._delete( - f"/v2/genai/models/api_keys/{api_key_uuid}", + f"/v2/gen-ai/models/api_keys/{api_key_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/models/api_keys/{api_key_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -227,7 +235,9 @@ def update_regenerate( if not api_key_uuid: raise ValueError(f"Expected a non-empty value for `api_key_uuid` but received {api_key_uuid!r}") return self._put( - f"/v2/genai/models/api_keys/{api_key_uuid}/regenerate", + f"/v2/gen-ai/models/api_keys/{api_key_uuid}/regenerate" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/models/api_keys/{api_key_uuid}/regenerate", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -242,7 +252,7 @@ def with_raw_response(self) -> AsyncAPIKeysResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return AsyncAPIKeysResourceWithRawResponse(self) @@ -251,7 +261,7 @@ def with_streaming_response(self) -> AsyncAPIKeysResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return AsyncAPIKeysResourceWithStreamingResponse(self) @@ -279,7 +289,9 @@ async def create( timeout: Override the client-level default timeout for this request, in seconds """ return await self._post( - "/v2/genai/models/api_keys", + "/v2/gen-ai/models/api_keys" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/models/api_keys", body=await async_maybe_transform({"name": name}, api_key_create_params.APIKeyCreateParams), options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout @@ -316,7 +328,9 @@ async def update( if not path_api_key_uuid: raise ValueError(f"Expected a non-empty value for `path_api_key_uuid` but received {path_api_key_uuid!r}") return await self._put( - f"/v2/genai/models/api_keys/{path_api_key_uuid}", + f"/v2/gen-ai/models/api_keys/{path_api_key_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/models/api_keys/{path_api_key_uuid}", body=await async_maybe_transform( { "body_api_key_uuid": body_api_key_uuid, @@ -359,7 +373,9 @@ async def list( timeout: Override the client-level default timeout for this request, in seconds """ return await self._get( - "/v2/genai/models/api_keys", + "/v2/gen-ai/models/api_keys" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/models/api_keys", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -403,7 +419,9 @@ async def delete( if not api_key_uuid: raise ValueError(f"Expected a non-empty value for `api_key_uuid` but received {api_key_uuid!r}") return await self._delete( - f"/v2/genai/models/api_keys/{api_key_uuid}", + f"/v2/gen-ai/models/api_keys/{api_key_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/models/api_keys/{api_key_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -437,7 +455,9 @@ async def update_regenerate( if not api_key_uuid: raise ValueError(f"Expected a non-empty value for `api_key_uuid` but received {api_key_uuid!r}") return await self._put( - f"/v2/genai/models/api_keys/{api_key_uuid}/regenerate", + f"/v2/gen-ai/models/api_keys/{api_key_uuid}/regenerate" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/models/api_keys/{api_key_uuid}/regenerate", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), diff --git a/src/gradientai/resources/inference/inference.py b/src/gradientai/resources/inference/inference.py new file mode 100644 index 00000000..209d6f17 --- /dev/null +++ b/src/gradientai/resources/inference/inference.py @@ -0,0 +1,134 @@ +# File generated from our OpenAPI spec by Stainless. 
See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .models import ( + ModelsResource, + AsyncModelsResource, + ModelsResourceWithRawResponse, + AsyncModelsResourceWithRawResponse, + ModelsResourceWithStreamingResponse, + AsyncModelsResourceWithStreamingResponse, +) +from .api_keys import ( + APIKeysResource, + AsyncAPIKeysResource, + APIKeysResourceWithRawResponse, + AsyncAPIKeysResourceWithRawResponse, + APIKeysResourceWithStreamingResponse, + AsyncAPIKeysResourceWithStreamingResponse, +) +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource + +__all__ = ["InferenceResource", "AsyncInferenceResource"] + + +class InferenceResource(SyncAPIResource): + @cached_property + def api_keys(self) -> APIKeysResource: + return APIKeysResource(self._client) + + @cached_property + def models(self) -> ModelsResource: + return ModelsResource(self._client) + + @cached_property + def with_raw_response(self) -> InferenceResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + """ + return InferenceResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> InferenceResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + """ + return InferenceResourceWithStreamingResponse(self) + + +class AsyncInferenceResource(AsyncAPIResource): + @cached_property + def api_keys(self) -> AsyncAPIKeysResource: + return AsyncAPIKeysResource(self._client) + + @cached_property + def models(self) -> AsyncModelsResource: + return AsyncModelsResource(self._client) + + @cached_property + def with_raw_response(self) -> AsyncInferenceResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + """ + return AsyncInferenceResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncInferenceResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + """ + return AsyncInferenceResourceWithStreamingResponse(self) + + +class InferenceResourceWithRawResponse: + def __init__(self, inference: InferenceResource) -> None: + self._inference = inference + + @cached_property + def api_keys(self) -> APIKeysResourceWithRawResponse: + return APIKeysResourceWithRawResponse(self._inference.api_keys) + + @cached_property + def models(self) -> ModelsResourceWithRawResponse: + return ModelsResourceWithRawResponse(self._inference.models) + + +class AsyncInferenceResourceWithRawResponse: + def __init__(self, inference: AsyncInferenceResource) -> None: + self._inference = inference + + @cached_property + def api_keys(self) -> AsyncAPIKeysResourceWithRawResponse: + return AsyncAPIKeysResourceWithRawResponse(self._inference.api_keys) + + @cached_property + def models(self) -> AsyncModelsResourceWithRawResponse: + return AsyncModelsResourceWithRawResponse(self._inference.models) + + +class InferenceResourceWithStreamingResponse: + def __init__(self, inference: InferenceResource) -> None: + self._inference = inference + + @cached_property + def api_keys(self) -> APIKeysResourceWithStreamingResponse: + return APIKeysResourceWithStreamingResponse(self._inference.api_keys) + + @cached_property + def models(self) -> ModelsResourceWithStreamingResponse: + return ModelsResourceWithStreamingResponse(self._inference.models) + + +class AsyncInferenceResourceWithStreamingResponse: + def __init__(self, inference: AsyncInferenceResource) -> None: + self._inference = inference + + @cached_property + def api_keys(self) -> AsyncAPIKeysResourceWithStreamingResponse: + return AsyncAPIKeysResourceWithStreamingResponse(self._inference.api_keys) + + @cached_property + def models(self) -> AsyncModelsResourceWithStreamingResponse: + return AsyncModelsResourceWithStreamingResponse(self._inference.models) diff --git a/src/digitalocean_genai_sdk/resources/models.py b/src/gradientai/resources/inference/models.py similarity index 87% rename from src/digitalocean_genai_sdk/resources/models.py rename to src/gradientai/resources/inference/models.py index 81b75441..42e1dcb2 100644 --- a/src/digitalocean_genai_sdk/resources/models.py +++ b/src/gradientai/resources/inference/models.py @@ -4,18 +4,18 @@ import httpx -from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven -from .._compat import cached_property -from .._resource import SyncAPIResource, AsyncAPIResource -from .._response import ( +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( to_raw_response_wrapper, to_streamed_response_wrapper, async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from ..types.model import Model -from .._base_client import make_request_options -from ..types.model_list_response import ModelListResponse +from ..._base_client import make_request_options +from ...types.inference.model import Model +from ...types.inference.model_list_response import ModelListResponse __all__ = ["ModelsResource", "AsyncModelsResource"] @@ -27,7 +27,7 @@ def with_raw_response(self) -> ModelsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return ModelsResourceWithRawResponse(self) @@ -36,7 +36,7 @@ def with_streaming_response(self) -> ModelsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return ModelsResourceWithStreamingResponse(self) @@ -67,7 +67,9 @@ def retrieve( if not model: raise ValueError(f"Expected a non-empty value for `model` but received {model!r}") return self._get( - f"/models/{model}", + f"/models/{model}" + if self._client._base_url_overridden + else f"https://inference.do-ai.run/v1/models/{model}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -89,7 +91,7 @@ def list( one such as the owner and availability. """ return self._get( - "/models", + "/models" if self._client._base_url_overridden else "https://inference.do-ai.run/v1/models", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -104,7 +106,7 @@ def with_raw_response(self) -> AsyncModelsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return AsyncModelsResourceWithRawResponse(self) @@ -113,7 +115,7 @@ def with_streaming_response(self) -> AsyncModelsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return AsyncModelsResourceWithStreamingResponse(self) @@ -144,7 +146,9 @@ async def retrieve( if not model: raise ValueError(f"Expected a non-empty value for `model` but received {model!r}") return await self._get( - f"/models/{model}", + f"/models/{model}" + if self._client._base_url_overridden + else f"https://inference.do-ai.run/v1/models/{model}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -166,7 +170,7 @@ async def list( one such as the owner and availability. 
""" return await self._get( - "/models", + "/models" if self._client._base_url_overridden else "https://inference.do-ai.run/v1/models", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), diff --git a/src/digitalocean_genai_sdk/resources/knowledge_bases/__init__.py b/src/gradientai/resources/knowledge_bases/__init__.py similarity index 100% rename from src/digitalocean_genai_sdk/resources/knowledge_bases/__init__.py rename to src/gradientai/resources/knowledge_bases/__init__.py diff --git a/src/digitalocean_genai_sdk/resources/knowledge_bases/data_sources.py b/src/gradientai/resources/knowledge_bases/data_sources.py similarity index 89% rename from src/digitalocean_genai_sdk/resources/knowledge_bases/data_sources.py rename to src/gradientai/resources/knowledge_bases/data_sources.py index b8a29c4a..e05696b9 100644 --- a/src/digitalocean_genai_sdk/resources/knowledge_bases/data_sources.py +++ b/src/gradientai/resources/knowledge_bases/data_sources.py @@ -19,6 +19,7 @@ data_source_list_params, data_source_create_params, ) +from ...types.knowledge_bases.aws_data_source_param import AwsDataSourceParam from ...types.knowledge_bases.data_source_list_response import DataSourceListResponse from ...types.knowledge_bases.data_source_create_response import DataSourceCreateResponse from ...types.knowledge_bases.data_source_delete_response import DataSourceDeleteResponse @@ -35,7 +36,7 @@ def with_raw_response(self) -> DataSourcesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return DataSourcesResourceWithRawResponse(self) @@ -44,7 +45,7 @@ def with_streaming_response(self) -> DataSourcesResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return DataSourcesResourceWithStreamingResponse(self) @@ -52,7 +53,7 @@ def create( self, path_knowledge_base_uuid: str, *, - aws_data_source: data_source_create_params.AwsDataSource | NotGiven = NOT_GIVEN, + aws_data_source: AwsDataSourceParam | NotGiven = NOT_GIVEN, body_knowledge_base_uuid: str | NotGiven = NOT_GIVEN, spaces_data_source: APISpacesDataSourceParam | NotGiven = NOT_GIVEN, web_crawler_data_source: APIWebCrawlerDataSourceParam | NotGiven = NOT_GIVEN, @@ -81,7 +82,9 @@ def create( f"Expected a non-empty value for `path_knowledge_base_uuid` but received {path_knowledge_base_uuid!r}" ) return self._post( - f"/v2/genai/knowledge_bases/{path_knowledge_base_uuid}/data_sources", + f"/v2/gen-ai/knowledge_bases/{path_knowledge_base_uuid}/data_sources" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/knowledge_bases/{path_knowledge_base_uuid}/data_sources", body=maybe_transform( { "aws_data_source": aws_data_source, @@ -132,7 +135,9 @@ def list( f"Expected a non-empty value for `knowledge_base_uuid` but received {knowledge_base_uuid!r}" ) return self._get( - f"/v2/genai/knowledge_bases/{knowledge_base_uuid}/data_sources", + f"/v2/gen-ai/knowledge_bases/{knowledge_base_uuid}/data_sources" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/knowledge_bases/{knowledge_base_uuid}/data_sources", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -181,7 +186,9 @@ def delete( if not data_source_uuid: raise ValueError(f"Expected a non-empty value for `data_source_uuid` but received {data_source_uuid!r}") return self._delete( - f"/v2/genai/knowledge_bases/{knowledge_base_uuid}/data_sources/{data_source_uuid}", + f"/v2/gen-ai/knowledge_bases/{knowledge_base_uuid}/data_sources/{data_source_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/knowledge_bases/{knowledge_base_uuid}/data_sources/{data_source_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -196,7 +203,7 @@ def with_raw_response(self) -> AsyncDataSourcesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return AsyncDataSourcesResourceWithRawResponse(self) @@ -205,7 +212,7 @@ def with_streaming_response(self) -> AsyncDataSourcesResourceWithStreamingRespon """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return AsyncDataSourcesResourceWithStreamingResponse(self) @@ -213,7 +220,7 @@ async def create( self, path_knowledge_base_uuid: str, *, - aws_data_source: data_source_create_params.AwsDataSource | NotGiven = NOT_GIVEN, + aws_data_source: AwsDataSourceParam | NotGiven = NOT_GIVEN, body_knowledge_base_uuid: str | NotGiven = NOT_GIVEN, spaces_data_source: APISpacesDataSourceParam | NotGiven = NOT_GIVEN, web_crawler_data_source: APIWebCrawlerDataSourceParam | NotGiven = NOT_GIVEN, @@ -242,7 +249,9 @@ async def create( f"Expected a non-empty value for `path_knowledge_base_uuid` but received {path_knowledge_base_uuid!r}" ) return await self._post( - f"/v2/genai/knowledge_bases/{path_knowledge_base_uuid}/data_sources", + f"/v2/gen-ai/knowledge_bases/{path_knowledge_base_uuid}/data_sources" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/knowledge_bases/{path_knowledge_base_uuid}/data_sources", body=await async_maybe_transform( { "aws_data_source": aws_data_source, @@ -293,7 +302,9 @@ async def list( f"Expected a non-empty value for `knowledge_base_uuid` but received {knowledge_base_uuid!r}" ) return await self._get( - f"/v2/genai/knowledge_bases/{knowledge_base_uuid}/data_sources", + f"/v2/gen-ai/knowledge_bases/{knowledge_base_uuid}/data_sources" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/knowledge_bases/{knowledge_base_uuid}/data_sources", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -342,7 +353,9 @@ async def delete( if not data_source_uuid: raise ValueError(f"Expected a non-empty value for `data_source_uuid` but received {data_source_uuid!r}") return await self._delete( - f"/v2/genai/knowledge_bases/{knowledge_base_uuid}/data_sources/{data_source_uuid}", + f"/v2/gen-ai/knowledge_bases/{knowledge_base_uuid}/data_sources/{data_source_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/knowledge_bases/{knowledge_base_uuid}/data_sources/{data_source_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), diff --git a/src/digitalocean_genai_sdk/resources/knowledge_bases/knowledge_bases.py b/src/gradientai/resources/knowledge_bases/knowledge_bases.py similarity index 92% rename from src/digitalocean_genai_sdk/resources/knowledge_bases/knowledge_bases.py rename to src/gradientai/resources/knowledge_bases/knowledge_bases.py index 713aca63..2cab4f7b 100644 --- a/src/digitalocean_genai_sdk/resources/knowledge_bases/knowledge_bases.py +++ b/src/gradientai/resources/knowledge_bases/knowledge_bases.py @@ -46,7 +46,7 @@ def with_raw_response(self) -> KnowledgeBasesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return KnowledgeBasesResourceWithRawResponse(self) @@ -55,7 +55,7 @@ def with_streaming_response(self) -> KnowledgeBasesResourceWithStreamingResponse """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return KnowledgeBasesResourceWithStreamingResponse(self) @@ -109,7 +109,9 @@ def create( timeout: Override the client-level default timeout for this request, in seconds """ return self._post( - "/v2/genai/knowledge_bases", + "/v2/gen-ai/knowledge_bases" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/knowledge_bases", body=maybe_transform( { "database_id": database_id, @@ -156,7 +158,9 @@ def retrieve( if not uuid: raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") return self._get( - f"/v2/genai/knowledge_bases/{uuid}", + f"/v2/gen-ai/knowledge_bases/{uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/knowledge_bases/{uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -202,7 +206,9 @@ def update( if not path_uuid: raise ValueError(f"Expected a non-empty value for `path_uuid` but received {path_uuid!r}") return self._put( - f"/v2/genai/knowledge_bases/{path_uuid}", + f"/v2/gen-ai/knowledge_bases/{path_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/knowledge_bases/{path_uuid}", body=maybe_transform( { "database_id": database_id, @@ -249,7 +255,9 @@ def list( timeout: Override the client-level default timeout for this request, in seconds """ return self._get( - "/v2/genai/knowledge_bases", + "/v2/gen-ai/knowledge_bases" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/knowledge_bases", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -293,7 +301,9 @@ def delete( if not uuid: raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") return self._delete( - f"/v2/genai/knowledge_bases/{uuid}", + f"/v2/gen-ai/knowledge_bases/{uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/knowledge_bases/{uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -312,7 +322,7 @@ def with_raw_response(self) -> AsyncKnowledgeBasesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return AsyncKnowledgeBasesResourceWithRawResponse(self) @@ -321,7 +331,7 @@ def with_streaming_response(self) -> AsyncKnowledgeBasesResourceWithStreamingRes """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return AsyncKnowledgeBasesResourceWithStreamingResponse(self) @@ -375,7 +385,9 @@ async def create( timeout: Override the client-level default timeout for this request, in seconds """ return await self._post( - "/v2/genai/knowledge_bases", + "/v2/gen-ai/knowledge_bases" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/knowledge_bases", body=await async_maybe_transform( { "database_id": database_id, @@ -422,7 +434,9 @@ async def retrieve( if not uuid: raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") return await self._get( - f"/v2/genai/knowledge_bases/{uuid}", + f"/v2/gen-ai/knowledge_bases/{uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/knowledge_bases/{uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -468,7 +482,9 @@ async def update( if not path_uuid: raise ValueError(f"Expected a non-empty value for `path_uuid` but received {path_uuid!r}") return await self._put( - f"/v2/genai/knowledge_bases/{path_uuid}", + f"/v2/gen-ai/knowledge_bases/{path_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/knowledge_bases/{path_uuid}", body=await async_maybe_transform( { "database_id": database_id, @@ -515,7 +531,9 @@ async def list( timeout: Override the client-level default timeout for this request, in seconds """ return await self._get( - "/v2/genai/knowledge_bases", + "/v2/gen-ai/knowledge_bases" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/knowledge_bases", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -559,7 +577,9 @@ async def delete( if not uuid: raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") return await self._delete( - f"/v2/genai/knowledge_bases/{uuid}", + f"/v2/gen-ai/knowledge_bases/{uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/knowledge_bases/{uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), diff --git a/src/digitalocean_genai_sdk/resources/api_keys/api_keys.py b/src/gradientai/resources/models.py similarity index 68% rename from src/digitalocean_genai_sdk/resources/api_keys/api_keys.py rename to src/gradientai/resources/models.py index 63091bcc..c8e78b9b 100644 --- a/src/digitalocean_genai_sdk/resources/api_keys/api_keys.py +++ b/src/gradientai/resources/models.py @@ -7,47 +7,42 @@ import httpx -from . 
import api_keys_ as api_keys -from ...types import api_key_list_params -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven -from ..._utils import maybe_transform, async_maybe_transform -from ..._compat import cached_property -from ..._resource import SyncAPIResource, AsyncAPIResource -from ..._response import ( +from ..types import model_list_params +from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from .._utils import maybe_transform, async_maybe_transform +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import ( to_raw_response_wrapper, to_streamed_response_wrapper, async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from ..._base_client import make_request_options -from ...types.api_key_list_response import APIKeyListResponse +from .._base_client import make_request_options +from ..types.model_list_response import ModelListResponse -__all__ = ["APIKeysResource", "AsyncAPIKeysResource"] +__all__ = ["ModelsResource", "AsyncModelsResource"] -class APIKeysResource(SyncAPIResource): +class ModelsResource(SyncAPIResource): @cached_property - def api_keys(self) -> api_keys.APIKeysResource: - return api_keys.APIKeysResource(self._client) - - @cached_property - def with_raw_response(self) -> APIKeysResourceWithRawResponse: + def with_raw_response(self) -> ModelsResourceWithRawResponse: """ This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ - return APIKeysResourceWithRawResponse(self) + return ModelsResourceWithRawResponse(self) @cached_property - def with_streaming_response(self) -> APIKeysResourceWithStreamingResponse: + def with_streaming_response(self) -> ModelsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ - return APIKeysResourceWithStreamingResponse(self) + return ModelsResourceWithStreamingResponse(self) def list( self, @@ -73,7 +68,7 @@ def list( extra_query: Query | None = None, extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> APIKeyListResponse: + ) -> ModelListResponse: """ To list all models, send a GET request to `/v2/gen-ai/models`. 
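The docstring above describes the underlying control-plane call; a minimal sketch of the equivalent raw request that `ModelsResource.list` wraps, with the path and query parameters taken from this diff and the bearer-token environment variable assumed for illustration:

# Rough equivalent of ModelsResource.list as a plain HTTP call.
# The token variable name is an assumption; the path and params mirror the diff.
import os

import httpx

token = os.environ.get("DIGITALOCEAN_ACCESS_TOKEN", "")  # assumed env var name
response = httpx.get(
    "https://api.digitalocean.com/v2/gen-ai/models",
    headers={"Authorization": f"Bearer {token}"},
    params={"page": 1, "per_page": 20, "public_only": True},
)
response.raise_for_status()
print(response.json())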
@@ -104,7 +99,9 @@ def list( timeout: Override the client-level default timeout for this request, in seconds """ return self._get( - "/v2/genai/models", + "/v2/gen-ai/models" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/models", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -117,36 +114,32 @@ def list( "public_only": public_only, "usecases": usecases, }, - api_key_list_params.APIKeyListParams, + model_list_params.ModelListParams, ), ), - cast_to=APIKeyListResponse, + cast_to=ModelListResponse, ) -class AsyncAPIKeysResource(AsyncAPIResource): - @cached_property - def api_keys(self) -> api_keys.AsyncAPIKeysResource: - return api_keys.AsyncAPIKeysResource(self._client) - +class AsyncModelsResource(AsyncAPIResource): @cached_property - def with_raw_response(self) -> AsyncAPIKeysResourceWithRawResponse: + def with_raw_response(self) -> AsyncModelsResourceWithRawResponse: """ This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ - return AsyncAPIKeysResourceWithRawResponse(self) + return AsyncModelsResourceWithRawResponse(self) @cached_property - def with_streaming_response(self) -> AsyncAPIKeysResourceWithStreamingResponse: + def with_streaming_response(self) -> AsyncModelsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ - return AsyncAPIKeysResourceWithStreamingResponse(self) + return AsyncModelsResourceWithStreamingResponse(self) async def list( self, @@ -172,7 +165,7 @@ async def list( extra_query: Query | None = None, extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> APIKeyListResponse: + ) -> ModelListResponse: """ To list all models, send a GET request to `/v2/gen-ai/models`. 
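The `.with_raw_response` and `.with_streaming_response` properties documented throughout these resources act as prefixes on the same method calls; a hypothetical illustration using the renamed models resource (the sync client class name and the `client.models` accessor are assumptions, and `.headers` / `.parse()` reflect how Stainless-generated response wrappers typically behave):

# Hypothetical illustration of the raw-response prefix described in the docstrings.
from gradientai import GradientAI  # assumed sync client export

client = GradientAI()

models = client.models.list()  # parsed ModelListResponse, the default behaviour

raw = client.models.with_raw_response.list()  # same call, raw response object first
print(raw.headers.get("content-type"))
print(raw.parse())  # parse into the same ModelListResponse afterwards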
@@ -203,7 +196,9 @@ async def list( timeout: Override the client-level default timeout for this request, in seconds """ return await self._get( - "/v2/genai/models", + "/v2/gen-ai/models" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/models", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -216,60 +211,44 @@ async def list( "public_only": public_only, "usecases": usecases, }, - api_key_list_params.APIKeyListParams, + model_list_params.ModelListParams, ), ), - cast_to=APIKeyListResponse, + cast_to=ModelListResponse, ) -class APIKeysResourceWithRawResponse: - def __init__(self, api_keys: APIKeysResource) -> None: - self._api_keys = api_keys +class ModelsResourceWithRawResponse: + def __init__(self, models: ModelsResource) -> None: + self._models = models self.list = to_raw_response_wrapper( - api_keys.list, + models.list, ) - @cached_property - def api_keys(self) -> api_keys.APIKeysResourceWithRawResponse: - return api_keys.APIKeysResourceWithRawResponse(self._api_keys.api_keys) - -class AsyncAPIKeysResourceWithRawResponse: - def __init__(self, api_keys: AsyncAPIKeysResource) -> None: - self._api_keys = api_keys +class AsyncModelsResourceWithRawResponse: + def __init__(self, models: AsyncModelsResource) -> None: + self._models = models self.list = async_to_raw_response_wrapper( - api_keys.list, + models.list, ) - @cached_property - def api_keys(self) -> api_keys.AsyncAPIKeysResourceWithRawResponse: - return api_keys.AsyncAPIKeysResourceWithRawResponse(self._api_keys.api_keys) - -class APIKeysResourceWithStreamingResponse: - def __init__(self, api_keys: APIKeysResource) -> None: - self._api_keys = api_keys +class ModelsResourceWithStreamingResponse: + def __init__(self, models: ModelsResource) -> None: + self._models = models self.list = to_streamed_response_wrapper( - api_keys.list, + models.list, ) - @cached_property - def api_keys(self) -> api_keys.APIKeysResourceWithStreamingResponse: - return api_keys.APIKeysResourceWithStreamingResponse(self._api_keys.api_keys) - -class AsyncAPIKeysResourceWithStreamingResponse: - def __init__(self, api_keys: AsyncAPIKeysResource) -> None: - self._api_keys = api_keys +class AsyncModelsResourceWithStreamingResponse: + def __init__(self, models: AsyncModelsResource) -> None: + self._models = models self.list = async_to_streamed_response_wrapper( - api_keys.list, + models.list, ) - - @cached_property - def api_keys(self) -> api_keys.AsyncAPIKeysResourceWithStreamingResponse: - return api_keys.AsyncAPIKeysResourceWithStreamingResponse(self._api_keys.api_keys) diff --git a/src/digitalocean_genai_sdk/resources/providers/__init__.py b/src/gradientai/resources/providers/__init__.py similarity index 100% rename from src/digitalocean_genai_sdk/resources/providers/__init__.py rename to src/gradientai/resources/providers/__init__.py diff --git a/src/digitalocean_genai_sdk/resources/providers/anthropic/__init__.py b/src/gradientai/resources/providers/anthropic/__init__.py similarity index 100% rename from src/digitalocean_genai_sdk/resources/providers/anthropic/__init__.py rename to src/gradientai/resources/providers/anthropic/__init__.py diff --git a/src/digitalocean_genai_sdk/resources/providers/anthropic/anthropic.py b/src/gradientai/resources/providers/anthropic/anthropic.py similarity index 93% rename from src/digitalocean_genai_sdk/resources/providers/anthropic/anthropic.py rename to src/gradientai/resources/providers/anthropic/anthropic.py index 64783563..23a914e9 100644 --- 
a/src/digitalocean_genai_sdk/resources/providers/anthropic/anthropic.py +++ b/src/gradientai/resources/providers/anthropic/anthropic.py @@ -27,7 +27,7 @@ def with_raw_response(self) -> AnthropicResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return AnthropicResourceWithRawResponse(self) @@ -36,7 +36,7 @@ def with_streaming_response(self) -> AnthropicResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return AnthropicResourceWithStreamingResponse(self) @@ -52,7 +52,7 @@ def with_raw_response(self) -> AsyncAnthropicResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return AsyncAnthropicResourceWithRawResponse(self) @@ -61,7 +61,7 @@ def with_streaming_response(self) -> AsyncAnthropicResourceWithStreamingResponse """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return AsyncAnthropicResourceWithStreamingResponse(self) diff --git a/src/digitalocean_genai_sdk/resources/providers/anthropic/keys.py b/src/gradientai/resources/providers/anthropic/keys.py similarity index 90% rename from src/digitalocean_genai_sdk/resources/providers/anthropic/keys.py rename to src/gradientai/resources/providers/anthropic/keys.py index 1f65a5ab..d1a33290 100644 --- a/src/digitalocean_genai_sdk/resources/providers/anthropic/keys.py +++ b/src/gradientai/resources/providers/anthropic/keys.py @@ -33,7 +33,7 @@ def with_raw_response(self) -> KeysResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return KeysResourceWithRawResponse(self) @@ -42,7 +42,7 @@ def with_streaming_response(self) -> KeysResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return KeysResourceWithStreamingResponse(self) @@ -72,7 +72,9 @@ def create( timeout: Override the client-level default timeout for this request, in seconds """ return self._post( - "/v2/genai/anthropic/keys", + "/v2/gen-ai/anthropic/keys" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/anthropic/keys", body=maybe_transform( { "api_key": api_key, @@ -113,7 +115,9 @@ def retrieve( if not api_key_uuid: raise ValueError(f"Expected a non-empty value for `api_key_uuid` but received {api_key_uuid!r}") return self._get( - f"/v2/genai/anthropic/keys/{api_key_uuid}", + f"/v2/gen-ai/anthropic/keys/{api_key_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/anthropic/keys/{api_key_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -150,7 +154,9 @@ def update( if not path_api_key_uuid: raise ValueError(f"Expected a non-empty value for `path_api_key_uuid` but received {path_api_key_uuid!r}") return self._put( - f"/v2/genai/anthropic/keys/{path_api_key_uuid}", + f"/v2/gen-ai/anthropic/keys/{path_api_key_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/anthropic/keys/{path_api_key_uuid}", body=maybe_transform( { "api_key": api_key, @@ -195,7 +201,9 @@ def list( timeout: Override the client-level default timeout for this request, in seconds """ return self._get( - "/v2/genai/anthropic/keys", + "/v2/gen-ai/anthropic/keys" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/anthropic/keys", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -239,7 +247,9 @@ def delete( if not api_key_uuid: raise ValueError(f"Expected a non-empty value for `api_key_uuid` but received {api_key_uuid!r}") return self._delete( - f"/v2/genai/anthropic/keys/{api_key_uuid}", + f"/v2/gen-ai/anthropic/keys/{api_key_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/anthropic/keys/{api_key_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -278,7 +288,9 @@ def list_agents( if not uuid: raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") return self._get( - f"/v2/genai/anthropic/keys/{uuid}/agents", + f"/v2/gen-ai/anthropic/keys/{uuid}/agents" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/anthropic/keys/{uuid}/agents", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -303,7 +315,7 @@ def with_raw_response(self) -> AsyncKeysResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return AsyncKeysResourceWithRawResponse(self) @@ -312,7 +324,7 @@ def with_streaming_response(self) -> AsyncKeysResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return AsyncKeysResourceWithStreamingResponse(self) @@ -342,7 +354,9 @@ async def create( timeout: Override the client-level default timeout for this request, in seconds """ return await self._post( - "/v2/genai/anthropic/keys", + "/v2/gen-ai/anthropic/keys" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/anthropic/keys", body=await async_maybe_transform( { "api_key": api_key, @@ -383,7 +397,9 @@ async def retrieve( if not api_key_uuid: raise ValueError(f"Expected a non-empty value for `api_key_uuid` but received {api_key_uuid!r}") return await self._get( - f"/v2/genai/anthropic/keys/{api_key_uuid}", + f"/v2/gen-ai/anthropic/keys/{api_key_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/anthropic/keys/{api_key_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -420,7 +436,9 @@ async def update( if not path_api_key_uuid: raise ValueError(f"Expected a non-empty value for `path_api_key_uuid` but received {path_api_key_uuid!r}") return await self._put( - f"/v2/genai/anthropic/keys/{path_api_key_uuid}", + f"/v2/gen-ai/anthropic/keys/{path_api_key_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/anthropic/keys/{path_api_key_uuid}", body=await async_maybe_transform( { "api_key": api_key, @@ -465,7 +483,9 @@ async def list( timeout: Override the client-level default timeout for this request, in seconds """ return await self._get( - "/v2/genai/anthropic/keys", + "/v2/gen-ai/anthropic/keys" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/anthropic/keys", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -509,7 +529,9 @@ async def delete( if not api_key_uuid: raise ValueError(f"Expected a non-empty value for `api_key_uuid` but received {api_key_uuid!r}") return await self._delete( - f"/v2/genai/anthropic/keys/{api_key_uuid}", + f"/v2/gen-ai/anthropic/keys/{api_key_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/anthropic/keys/{api_key_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -548,7 +570,9 @@ async def list_agents( if not uuid: raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") return await self._get( - f"/v2/genai/anthropic/keys/{uuid}/agents", + f"/v2/gen-ai/anthropic/keys/{uuid}/agents" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/anthropic/keys/{uuid}/agents", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, diff --git a/src/digitalocean_genai_sdk/resources/providers/openai/__init__.py 
b/src/gradientai/resources/providers/openai/__init__.py similarity index 100% rename from src/digitalocean_genai_sdk/resources/providers/openai/__init__.py rename to src/gradientai/resources/providers/openai/__init__.py diff --git a/src/digitalocean_genai_sdk/resources/providers/openai/keys.py b/src/gradientai/resources/providers/openai/keys.py similarity index 90% rename from src/digitalocean_genai_sdk/resources/providers/openai/keys.py rename to src/gradientai/resources/providers/openai/keys.py index 06e7a23c..01cfee75 100644 --- a/src/digitalocean_genai_sdk/resources/providers/openai/keys.py +++ b/src/gradientai/resources/providers/openai/keys.py @@ -33,7 +33,7 @@ def with_raw_response(self) -> KeysResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return KeysResourceWithRawResponse(self) @@ -42,7 +42,7 @@ def with_streaming_response(self) -> KeysResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return KeysResourceWithStreamingResponse(self) @@ -71,7 +71,9 @@ def create( timeout: Override the client-level default timeout for this request, in seconds """ return self._post( - "/v2/genai/openai/keys", + "/v2/gen-ai/openai/keys" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/openai/keys", body=maybe_transform( { "api_key": api_key, @@ -112,7 +114,9 @@ def retrieve( if not api_key_uuid: raise ValueError(f"Expected a non-empty value for `api_key_uuid` but received {api_key_uuid!r}") return self._get( - f"/v2/genai/openai/keys/{api_key_uuid}", + f"/v2/gen-ai/openai/keys/{api_key_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/openai/keys/{api_key_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -149,7 +153,9 @@ def update( if not path_api_key_uuid: raise ValueError(f"Expected a non-empty value for `path_api_key_uuid` but received {path_api_key_uuid!r}") return self._put( - f"/v2/genai/openai/keys/{path_api_key_uuid}", + f"/v2/gen-ai/openai/keys/{path_api_key_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/openai/keys/{path_api_key_uuid}", body=maybe_transform( { "api_key": api_key, @@ -193,7 +199,9 @@ def list( timeout: Override the client-level default timeout for this request, in seconds """ return self._get( - "/v2/genai/openai/keys", + "/v2/gen-ai/openai/keys" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/openai/keys", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -237,7 +245,9 @@ def delete( if not api_key_uuid: raise ValueError(f"Expected a non-empty value for `api_key_uuid` but received {api_key_uuid!r}") return self._delete( - f"/v2/genai/openai/keys/{api_key_uuid}", + f"/v2/gen-ai/openai/keys/{api_key_uuid}" + if self._client._base_url_overridden + else 
f"https://api.digitalocean.com/v2/gen-ai/openai/keys/{api_key_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -276,7 +286,9 @@ def retrieve_agents( if not uuid: raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") return self._get( - f"/v2/genai/openai/keys/{uuid}/agents", + f"/v2/gen-ai/openai/keys/{uuid}/agents" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/openai/keys/{uuid}/agents", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -301,7 +313,7 @@ def with_raw_response(self) -> AsyncKeysResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return AsyncKeysResourceWithRawResponse(self) @@ -310,7 +322,7 @@ def with_streaming_response(self) -> AsyncKeysResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return AsyncKeysResourceWithStreamingResponse(self) @@ -339,7 +351,9 @@ async def create( timeout: Override the client-level default timeout for this request, in seconds """ return await self._post( - "/v2/genai/openai/keys", + "/v2/gen-ai/openai/keys" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/openai/keys", body=await async_maybe_transform( { "api_key": api_key, @@ -380,7 +394,9 @@ async def retrieve( if not api_key_uuid: raise ValueError(f"Expected a non-empty value for `api_key_uuid` but received {api_key_uuid!r}") return await self._get( - f"/v2/genai/openai/keys/{api_key_uuid}", + f"/v2/gen-ai/openai/keys/{api_key_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/openai/keys/{api_key_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -417,7 +433,9 @@ async def update( if not path_api_key_uuid: raise ValueError(f"Expected a non-empty value for `path_api_key_uuid` but received {path_api_key_uuid!r}") return await self._put( - f"/v2/genai/openai/keys/{path_api_key_uuid}", + f"/v2/gen-ai/openai/keys/{path_api_key_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/openai/keys/{path_api_key_uuid}", body=await async_maybe_transform( { "api_key": api_key, @@ -461,7 +479,9 @@ async def list( timeout: Override the client-level default timeout for this request, in seconds """ return await self._get( - "/v2/genai/openai/keys", + "/v2/gen-ai/openai/keys" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/openai/keys", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, @@ -505,7 +525,9 @@ async def delete( if not api_key_uuid: raise ValueError(f"Expected a non-empty value for `api_key_uuid` but received {api_key_uuid!r}") return await self._delete( - f"/v2/genai/openai/keys/{api_key_uuid}", + 
f"/v2/gen-ai/openai/keys/{api_key_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/openai/keys/{api_key_uuid}", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -544,7 +566,9 @@ async def retrieve_agents( if not uuid: raise ValueError(f"Expected a non-empty value for `uuid` but received {uuid!r}") return await self._get( - f"/v2/genai/openai/keys/{uuid}/agents", + f"/v2/gen-ai/openai/keys/{uuid}/agents" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/openai/keys/{uuid}/agents", options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, diff --git a/src/digitalocean_genai_sdk/resources/providers/openai/openai.py b/src/gradientai/resources/providers/openai/openai.py similarity index 93% rename from src/digitalocean_genai_sdk/resources/providers/openai/openai.py rename to src/gradientai/resources/providers/openai/openai.py index d29fd062..b02dc2e1 100644 --- a/src/digitalocean_genai_sdk/resources/providers/openai/openai.py +++ b/src/gradientai/resources/providers/openai/openai.py @@ -27,7 +27,7 @@ def with_raw_response(self) -> OpenAIResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return OpenAIResourceWithRawResponse(self) @@ -36,7 +36,7 @@ def with_streaming_response(self) -> OpenAIResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return OpenAIResourceWithStreamingResponse(self) @@ -52,7 +52,7 @@ def with_raw_response(self) -> AsyncOpenAIResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return AsyncOpenAIResourceWithRawResponse(self) @@ -61,7 +61,7 @@ def with_streaming_response(self) -> AsyncOpenAIResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return AsyncOpenAIResourceWithStreamingResponse(self) diff --git a/src/digitalocean_genai_sdk/resources/providers/providers.py b/src/gradientai/resources/providers/providers.py similarity index 95% rename from src/digitalocean_genai_sdk/resources/providers/providers.py rename to src/gradientai/resources/providers/providers.py index 50e3db1a..ef942f73 100644 --- a/src/digitalocean_genai_sdk/resources/providers/providers.py +++ b/src/gradientai/resources/providers/providers.py @@ -39,7 +39,7 @@ def with_raw_response(self) -> ProvidersResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return ProvidersResourceWithRawResponse(self) @@ -48,7 +48,7 @@ def with_streaming_response(self) -> ProvidersResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return ProvidersResourceWithStreamingResponse(self) @@ -68,7 +68,7 @@ def with_raw_response(self) -> AsyncProvidersResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/genai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers """ return AsyncProvidersResourceWithRawResponse(self) @@ -77,7 +77,7 @@ def with_streaming_response(self) -> AsyncProvidersResourceWithStreamingResponse """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/genai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response """ return AsyncProvidersResourceWithStreamingResponse(self) diff --git a/src/gradientai/resources/regions/__init__.py b/src/gradientai/resources/regions/__init__.py new file mode 100644 index 00000000..51a96d61 --- /dev/null +++ b/src/gradientai/resources/regions/__init__.py @@ -0,0 +1,61 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
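Every resource method touched in this change selects its request target with the same conditional: a relative `/v2/gen-ai/...` path when the client's base URL has been overridden, and the absolute production host otherwise. A minimal sketch of that selection logic, assuming only the `base_url` value and `_base_url_overridden` flag visible in the generated code (`resolve_request_url` itself is illustrative and not part of the SDK):

DEFAULT_API_HOST = "https://api.digitalocean.com"

def resolve_request_url(base_url: str, base_url_overridden: bool, path: str) -> str:
    # When the caller pointed the client at a custom base URL (for example a mock
    # server or proxy), honour it and send the relative path; otherwise pin the
    # request to the public DigitalOcean API host, mirroring the branches above.
    if base_url_overridden:
        return base_url.rstrip("/") + path
    return DEFAULT_API_HOST + path

# resolve_request_url("http://localhost:4010", True, "/v2/gen-ai/openai/keys")
# -> "http://localhost:4010/v2/gen-ai/openai/keys"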
+ +from .regions import ( + RegionsResource, + AsyncRegionsResource, + RegionsResourceWithRawResponse, + AsyncRegionsResourceWithRawResponse, + RegionsResourceWithStreamingResponse, + AsyncRegionsResourceWithStreamingResponse, +) +from .evaluation_runs import ( + EvaluationRunsResource, + AsyncEvaluationRunsResource, + EvaluationRunsResourceWithRawResponse, + AsyncEvaluationRunsResourceWithRawResponse, + EvaluationRunsResourceWithStreamingResponse, + AsyncEvaluationRunsResourceWithStreamingResponse, +) +from .evaluation_datasets import ( + EvaluationDatasetsResource, + AsyncEvaluationDatasetsResource, + EvaluationDatasetsResourceWithRawResponse, + AsyncEvaluationDatasetsResourceWithRawResponse, + EvaluationDatasetsResourceWithStreamingResponse, + AsyncEvaluationDatasetsResourceWithStreamingResponse, +) +from .evaluation_test_cases import ( + EvaluationTestCasesResource, + AsyncEvaluationTestCasesResource, + EvaluationTestCasesResourceWithRawResponse, + AsyncEvaluationTestCasesResourceWithRawResponse, + EvaluationTestCasesResourceWithStreamingResponse, + AsyncEvaluationTestCasesResourceWithStreamingResponse, +) + +__all__ = [ + "EvaluationRunsResource", + "AsyncEvaluationRunsResource", + "EvaluationRunsResourceWithRawResponse", + "AsyncEvaluationRunsResourceWithRawResponse", + "EvaluationRunsResourceWithStreamingResponse", + "AsyncEvaluationRunsResourceWithStreamingResponse", + "EvaluationTestCasesResource", + "AsyncEvaluationTestCasesResource", + "EvaluationTestCasesResourceWithRawResponse", + "AsyncEvaluationTestCasesResourceWithRawResponse", + "EvaluationTestCasesResourceWithStreamingResponse", + "AsyncEvaluationTestCasesResourceWithStreamingResponse", + "EvaluationDatasetsResource", + "AsyncEvaluationDatasetsResource", + "EvaluationDatasetsResourceWithRawResponse", + "AsyncEvaluationDatasetsResourceWithRawResponse", + "EvaluationDatasetsResourceWithStreamingResponse", + "AsyncEvaluationDatasetsResourceWithStreamingResponse", + "RegionsResource", + "AsyncRegionsResource", + "RegionsResourceWithRawResponse", + "AsyncRegionsResourceWithRawResponse", + "RegionsResourceWithStreamingResponse", + "AsyncRegionsResourceWithStreamingResponse", +] diff --git a/src/gradientai/resources/regions/evaluation_datasets.py b/src/gradientai/resources/regions/evaluation_datasets.py new file mode 100644 index 00000000..f82e9701 --- /dev/null +++ b/src/gradientai/resources/regions/evaluation_datasets.py @@ -0,0 +1,292 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
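The `*ResourceWithRawResponse` and `*ResourceWithStreamingResponse` classes re-exported here all follow one shape: hold a reference to the wrapped resource and rebind each of its methods through a response wrapper from `gradientai._response`. A simplified, self-contained sketch of that shape (the toy wrapper below only tags the parsed value; the real wrappers return the un-parsed HTTP response):

from typing import Any, Callable

def raw_response_wrapper(fn: Callable[..., Any]) -> Callable[..., dict]:
    # Stand-in for to_raw_response_wrapper, shown here only to illustrate the rebinding.
    def wrapped(*args: Any, **kwargs: Any) -> dict:
        return {"raw": True, "parsed": fn(*args, **kwargs)}
    return wrapped

class ExampleResource:
    def list(self) -> list:
        return ["item-1", "item-2"]

class ExampleResourceWithRawResponse:
    # Same construction-time rebinding used by the generated wrapper classes below.
    def __init__(self, resource: ExampleResource) -> None:
        self._resource = resource
        self.list = raw_response_wrapper(resource.list)

# ExampleResourceWithRawResponse(ExampleResource()).list()
# -> {"raw": True, "parsed": ["item-1", "item-2"]}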
+ +from __future__ import annotations + +from typing import Iterable + +import httpx + +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..._base_client import make_request_options +from ...types.regions import ( + evaluation_dataset_create_params, + evaluation_dataset_create_file_upload_presigned_urls_params, +) +from ...types.regions.evaluation_dataset_create_response import EvaluationDatasetCreateResponse +from ...types.knowledge_bases.api_file_upload_data_source_param import APIFileUploadDataSourceParam +from ...types.regions.evaluation_dataset_create_file_upload_presigned_urls_response import ( + EvaluationDatasetCreateFileUploadPresignedURLsResponse, +) + +__all__ = ["EvaluationDatasetsResource", "AsyncEvaluationDatasetsResource"] + + +class EvaluationDatasetsResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> EvaluationDatasetsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + """ + return EvaluationDatasetsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> EvaluationDatasetsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + """ + return EvaluationDatasetsResourceWithStreamingResponse(self) + + def create( + self, + *, + file_upload_dataset: APIFileUploadDataSourceParam | NotGiven = NOT_GIVEN, + name: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> EvaluationDatasetCreateResponse: + """ + To create an evaluation dataset, send a POST request to + `/v2/gen-ai/evaluation_datasets`. + + Args: + file_upload_dataset: File to upload as data source for knowledge base. + + name: The name of the agent evaluation dataset. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/v2/gen-ai/evaluation_datasets" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/evaluation_datasets", + body=maybe_transform( + { + "file_upload_dataset": file_upload_dataset, + "name": name, + }, + evaluation_dataset_create_params.EvaluationDatasetCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EvaluationDatasetCreateResponse, + ) + + def create_file_upload_presigned_urls( + self, + *, + files: Iterable[evaluation_dataset_create_file_upload_presigned_urls_params.File] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> EvaluationDatasetCreateFileUploadPresignedURLsResponse: + """ + To create presigned URLs for evaluation dataset file upload, send a POST request + to `/v2/gen-ai/evaluation_datasets/file_upload_presigned_urls`. + + Args: + files: A list of files to generate presigned URLs for. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/v2/gen-ai/evaluation_datasets/file_upload_presigned_urls" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/evaluation_datasets/file_upload_presigned_urls", + body=maybe_transform( + {"files": files}, + evaluation_dataset_create_file_upload_presigned_urls_params.EvaluationDatasetCreateFileUploadPresignedURLsParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EvaluationDatasetCreateFileUploadPresignedURLsResponse, + ) + + +class AsyncEvaluationDatasetsResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncEvaluationDatasetsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + """ + return AsyncEvaluationDatasetsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncEvaluationDatasetsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + """ + return AsyncEvaluationDatasetsResourceWithStreamingResponse(self) + + async def create( + self, + *, + file_upload_dataset: APIFileUploadDataSourceParam | NotGiven = NOT_GIVEN, + name: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> EvaluationDatasetCreateResponse: + """ + To create an evaluation dataset, send a POST request to + `/v2/gen-ai/evaluation_datasets`. + + Args: + file_upload_dataset: File to upload as data source for knowledge base. + + name: The name of the agent evaluation dataset. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/v2/gen-ai/evaluation_datasets" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/evaluation_datasets", + body=await async_maybe_transform( + { + "file_upload_dataset": file_upload_dataset, + "name": name, + }, + evaluation_dataset_create_params.EvaluationDatasetCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EvaluationDatasetCreateResponse, + ) + + async def create_file_upload_presigned_urls( + self, + *, + files: Iterable[evaluation_dataset_create_file_upload_presigned_urls_params.File] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> EvaluationDatasetCreateFileUploadPresignedURLsResponse: + """ + To create presigned URLs for evaluation dataset file upload, send a POST request + to `/v2/gen-ai/evaluation_datasets/file_upload_presigned_urls`. + + Args: + files: A list of files to generate presigned URLs for. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/v2/gen-ai/evaluation_datasets/file_upload_presigned_urls" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/evaluation_datasets/file_upload_presigned_urls", + body=await async_maybe_transform( + {"files": files}, + evaluation_dataset_create_file_upload_presigned_urls_params.EvaluationDatasetCreateFileUploadPresignedURLsParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EvaluationDatasetCreateFileUploadPresignedURLsResponse, + ) + + +class EvaluationDatasetsResourceWithRawResponse: + def __init__(self, evaluation_datasets: EvaluationDatasetsResource) -> None: + self._evaluation_datasets = evaluation_datasets + + self.create = to_raw_response_wrapper( + evaluation_datasets.create, + ) + self.create_file_upload_presigned_urls = to_raw_response_wrapper( + evaluation_datasets.create_file_upload_presigned_urls, + ) + + +class AsyncEvaluationDatasetsResourceWithRawResponse: + def __init__(self, evaluation_datasets: AsyncEvaluationDatasetsResource) -> None: + self._evaluation_datasets = evaluation_datasets + + self.create = async_to_raw_response_wrapper( + evaluation_datasets.create, + ) + self.create_file_upload_presigned_urls = async_to_raw_response_wrapper( + evaluation_datasets.create_file_upload_presigned_urls, + ) + + +class EvaluationDatasetsResourceWithStreamingResponse: + def __init__(self, evaluation_datasets: EvaluationDatasetsResource) -> None: + self._evaluation_datasets = evaluation_datasets + + self.create = to_streamed_response_wrapper( + evaluation_datasets.create, + ) + self.create_file_upload_presigned_urls = to_streamed_response_wrapper( + evaluation_datasets.create_file_upload_presigned_urls, + ) + + +class AsyncEvaluationDatasetsResourceWithStreamingResponse: + def __init__(self, evaluation_datasets: AsyncEvaluationDatasetsResource) -> None: + self._evaluation_datasets = evaluation_datasets + + self.create = async_to_streamed_response_wrapper( + evaluation_datasets.create, + ) + self.create_file_upload_presigned_urls = async_to_streamed_response_wrapper( + evaluation_datasets.create_file_upload_presigned_urls, + ) diff --git a/src/gradientai/resources/regions/evaluation_runs/__init__.py b/src/gradientai/resources/regions/evaluation_runs/__init__.py new file mode 100644 index 00000000..e5580dd0 --- /dev/null +++ b/src/gradientai/resources/regions/evaluation_runs/__init__.py @@ -0,0 +1,33 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
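As a usage sketch for the two evaluation-dataset endpoints implemented above: the client class name, its constructor argument, and the `client.regions.evaluation_datasets` mount point are assumptions made for illustration (this diff only defines the resource class); the method names and keyword arguments are the generated ones.

import os

from gradientai import GradientAI  # assumed top-level export; not shown in this diff

client = GradientAI(api_key=os.environ["DIGITALOCEAN_API_TOKEN"])  # constructor argument assumed

datasets = client.regions.evaluation_datasets  # assumed mount point for EvaluationDatasetsResource

# POST /v2/gen-ai/evaluation_datasets/file_upload_presigned_urls
# Each entry must match evaluation_dataset_create_file_upload_presigned_urls_params.File;
# the exact fields live in that params module and are not repeated here.
file_descriptors: list = []
presigned = datasets.create_file_upload_presigned_urls(files=file_descriptors)

# POST /v2/gen-ai/evaluation_datasets
# file_upload_dataset follows APIFileUploadDataSourceParam from types.knowledge_bases.
dataset = datasets.create(name="example-eval-dataset", file_upload_dataset={})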
+ +from .results import ( + ResultsResource, + AsyncResultsResource, + ResultsResourceWithRawResponse, + AsyncResultsResourceWithRawResponse, + ResultsResourceWithStreamingResponse, + AsyncResultsResourceWithStreamingResponse, +) +from .evaluation_runs import ( + EvaluationRunsResource, + AsyncEvaluationRunsResource, + EvaluationRunsResourceWithRawResponse, + AsyncEvaluationRunsResourceWithRawResponse, + EvaluationRunsResourceWithStreamingResponse, + AsyncEvaluationRunsResourceWithStreamingResponse, +) + +__all__ = [ + "ResultsResource", + "AsyncResultsResource", + "ResultsResourceWithRawResponse", + "AsyncResultsResourceWithRawResponse", + "ResultsResourceWithStreamingResponse", + "AsyncResultsResourceWithStreamingResponse", + "EvaluationRunsResource", + "AsyncEvaluationRunsResource", + "EvaluationRunsResourceWithRawResponse", + "AsyncEvaluationRunsResourceWithRawResponse", + "EvaluationRunsResourceWithStreamingResponse", + "AsyncEvaluationRunsResourceWithStreamingResponse", +] diff --git a/src/gradientai/resources/regions/evaluation_runs/evaluation_runs.py b/src/gradientai/resources/regions/evaluation_runs/evaluation_runs.py new file mode 100644 index 00000000..9221c45c --- /dev/null +++ b/src/gradientai/resources/regions/evaluation_runs/evaluation_runs.py @@ -0,0 +1,316 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import httpx + +from .results import ( + ResultsResource, + AsyncResultsResource, + ResultsResourceWithRawResponse, + AsyncResultsResourceWithRawResponse, + ResultsResourceWithStreamingResponse, + AsyncResultsResourceWithStreamingResponse, +) +from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ...._utils import maybe_transform, async_maybe_transform +from ...._compat import cached_property +from ...._resource import SyncAPIResource, AsyncAPIResource +from ...._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ...._base_client import make_request_options +from ....types.regions import evaluation_run_create_params +from ....types.regions.evaluation_run_create_response import EvaluationRunCreateResponse +from ....types.regions.evaluation_run_retrieve_response import EvaluationRunRetrieveResponse + +__all__ = ["EvaluationRunsResource", "AsyncEvaluationRunsResource"] + + +class EvaluationRunsResource(SyncAPIResource): + @cached_property + def results(self) -> ResultsResource: + return ResultsResource(self._client) + + @cached_property + def with_raw_response(self) -> EvaluationRunsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + """ + return EvaluationRunsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> EvaluationRunsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + """ + return EvaluationRunsResourceWithStreamingResponse(self) + + def create( + self, + *, + agent_uuid: str | NotGiven = NOT_GIVEN, + run_name: str | NotGiven = NOT_GIVEN, + test_case_uuid: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> EvaluationRunCreateResponse: + """ + To run an evaluation test case, send a POST request to + `/v2/gen-ai/evaluation_runs`. + + Args: + agent_uuid: Agent UUID to run the test case against. + + run_name: The name of the run. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/v2/gen-ai/evaluation_runs" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/evaluation_runs", + body=maybe_transform( + { + "agent_uuid": agent_uuid, + "run_name": run_name, + "test_case_uuid": test_case_uuid, + }, + evaluation_run_create_params.EvaluationRunCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EvaluationRunCreateResponse, + ) + + def retrieve( + self, + evaluation_run_uuid: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> EvaluationRunRetrieveResponse: + """ + To retrieve information about an existing evaluation run, send a GET request to + `/v2/gen-ai/evaluation_runs/{evaluation_run_uuid}`.
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not evaluation_run_uuid: + raise ValueError( + f"Expected a non-empty value for `evaluation_run_uuid` but received {evaluation_run_uuid!r}" + ) + return self._get( + f"/v2/gen-ai/evaluation_runs/{evaluation_run_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/evaluation_runs/{evaluation_run_uuid}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EvaluationRunRetrieveResponse, + ) + + +class AsyncEvaluationRunsResource(AsyncAPIResource): + @cached_property + def results(self) -> AsyncResultsResource: + return AsyncResultsResource(self._client) + + @cached_property + def with_raw_response(self) -> AsyncEvaluationRunsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + """ + return AsyncEvaluationRunsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncEvaluationRunsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + """ + return AsyncEvaluationRunsResourceWithStreamingResponse(self) + + async def create( + self, + *, + agent_uuid: str | NotGiven = NOT_GIVEN, + run_name: str | NotGiven = NOT_GIVEN, + test_case_uuid: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> EvaluationRunCreateResponse: + """ + To run an evaluation test case, send a POST request to + `/v2/gen-ai/evaluation_runs`. + + Args: + agent_uuid: Agent UUID to run the test case against. + + run_name: The name of the run. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/v2/gen-ai/evaluation_runs" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/evaluation_runs", + body=await async_maybe_transform( + { + "agent_uuid": agent_uuid, + "run_name": run_name, + "test_case_uuid": test_case_uuid, + }, + evaluation_run_create_params.EvaluationRunCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EvaluationRunCreateResponse, + ) + + async def retrieve( + self, + evaluation_run_uuid: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> EvaluationRunRetrieveResponse: + """ + To retrieve information about an existing evaluation run, send a GET request to + `/v2/gen-ai/evaluation_runs/{evaluation_run_uuid}`. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not evaluation_run_uuid: + raise ValueError( + f"Expected a non-empty value for `evaluation_run_uuid` but received {evaluation_run_uuid!r}" + ) + return await self._get( + f"/v2/gen-ai/evaluation_runs/{evaluation_run_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/evaluation_runs/{evaluation_run_uuid}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EvaluationRunRetrieveResponse, + ) + + +class EvaluationRunsResourceWithRawResponse: + def __init__(self, evaluation_runs: EvaluationRunsResource) -> None: + self._evaluation_runs = evaluation_runs + + self.create = to_raw_response_wrapper( + evaluation_runs.create, + ) + self.retrieve = to_raw_response_wrapper( + evaluation_runs.retrieve, + ) + + @cached_property + def results(self) -> ResultsResourceWithRawResponse: + return ResultsResourceWithRawResponse(self._evaluation_runs.results) + + +class AsyncEvaluationRunsResourceWithRawResponse: + def __init__(self, evaluation_runs: AsyncEvaluationRunsResource) -> None: + self._evaluation_runs = evaluation_runs + + self.create = async_to_raw_response_wrapper( + evaluation_runs.create, + ) + self.retrieve = async_to_raw_response_wrapper( + evaluation_runs.retrieve, + ) + + @cached_property + def results(self) -> AsyncResultsResourceWithRawResponse: + return AsyncResultsResourceWithRawResponse(self._evaluation_runs.results) + + +class EvaluationRunsResourceWithStreamingResponse: + def __init__(self, evaluation_runs: EvaluationRunsResource) -> None: + self._evaluation_runs = evaluation_runs + + self.create = to_streamed_response_wrapper( + evaluation_runs.create, + ) + self.retrieve = to_streamed_response_wrapper( + evaluation_runs.retrieve, + ) + + @cached_property + def results(self) -> ResultsResourceWithStreamingResponse: + return ResultsResourceWithStreamingResponse(self._evaluation_runs.results) + + +class AsyncEvaluationRunsResourceWithStreamingResponse: + def __init__(self, evaluation_runs: AsyncEvaluationRunsResource) -> None: + self._evaluation_runs = evaluation_runs + + self.create = async_to_streamed_response_wrapper( + evaluation_runs.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + evaluation_runs.retrieve, + ) + + @cached_property + def results(self) -> AsyncResultsResourceWithStreamingResponse: + return AsyncResultsResourceWithStreamingResponse(self._evaluation_runs.results) diff --git a/src/gradientai/resources/regions/evaluation_runs/results.py b/src/gradientai/resources/regions/evaluation_runs/results.py new file mode 100644 index 00000000..ad74a778 --- /dev/null +++ b/src/gradientai/resources/regions/evaluation_runs/results.py @@ -0,0 +1,264 @@ +# File generated from our OpenAPI spec by Stainless.
See CONTRIBUTING.md for details. + +from __future__ import annotations + +import httpx + +from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ...._compat import cached_property +from ...._resource import SyncAPIResource, AsyncAPIResource +from ...._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ...._base_client import make_request_options +from ....types.regions.evaluation_runs.result_retrieve_response import ResultRetrieveResponse +from ....types.regions.evaluation_runs.result_retrieve_prompt_response import ResultRetrievePromptResponse + +__all__ = ["ResultsResource", "AsyncResultsResource"] + + +class ResultsResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> ResultsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + """ + return ResultsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> ResultsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + """ + return ResultsResourceWithStreamingResponse(self) + + def retrieve( + self, + evaluation_run_uuid: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ResultRetrieveResponse: + """ + To retrieve results of an evaluation run, send a GET request to + `/v2/gen-ai/evaluation_runs/{evaluation_run_uuid}/results`. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not evaluation_run_uuid: + raise ValueError( + f"Expected a non-empty value for `evaluation_run_uuid` but received {evaluation_run_uuid!r}" + ) + return self._get( + f"/v2/gen-ai/evaluation_runs/{evaluation_run_uuid}/results" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/evaluation_runs/{evaluation_run_uuid}/results", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ResultRetrieveResponse, + ) + + def retrieve_prompt( + self, + prompt_id: int, + *, + evaluation_run_uuid: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ResultRetrievePromptResponse: + """ + To retrieve results for a specific prompt in an evaluation run, send a GET request to + `/v2/gen-ai/evaluation_runs/{evaluation_run_uuid}/results/{prompt_id}`. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not evaluation_run_uuid: + raise ValueError( + f"Expected a non-empty value for `evaluation_run_uuid` but received {evaluation_run_uuid!r}" + ) + return self._get( + f"/v2/gen-ai/evaluation_runs/{evaluation_run_uuid}/results/{prompt_id}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/evaluation_runs/{evaluation_run_uuid}/results/{prompt_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ResultRetrievePromptResponse, + ) + + +class AsyncResultsResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncResultsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + """ + return AsyncResultsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncResultsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + """ + return AsyncResultsResourceWithStreamingResponse(self) + + async def retrieve( + self, + evaluation_run_uuid: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ResultRetrieveResponse: + """ + To retrieve results of an evaluation run, send a GET request to + `/v2/gen-ai/evaluation_runs/{evaluation_run_uuid}/results`.
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not evaluation_run_uuid: + raise ValueError( + f"Expected a non-empty value for `evaluation_run_uuid` but received {evaluation_run_uuid!r}" + ) + return await self._get( + f"/v2/gen-ai/evaluation_runs/{evaluation_run_uuid}/results" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/evaluation_runs/{evaluation_run_uuid}/results", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ResultRetrieveResponse, + ) + + async def retrieve_prompt( + self, + prompt_id: int, + *, + evaluation_run_uuid: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> ResultRetrievePromptResponse: + """ + To retrieve results for a specific prompt in an evaluation run, send a GET request to + `/v2/gen-ai/evaluation_runs/{evaluation_run_uuid}/results/{prompt_id}`. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not evaluation_run_uuid: + raise ValueError( + f"Expected a non-empty value for `evaluation_run_uuid` but received {evaluation_run_uuid!r}" + ) + return await self._get( + f"/v2/gen-ai/evaluation_runs/{evaluation_run_uuid}/results/{prompt_id}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/evaluation_runs/{evaluation_run_uuid}/results/{prompt_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=ResultRetrievePromptResponse, + ) + + +class ResultsResourceWithRawResponse: + def __init__(self, results: ResultsResource) -> None: + self._results = results + + self.retrieve = to_raw_response_wrapper( + results.retrieve, + ) + self.retrieve_prompt = to_raw_response_wrapper( + results.retrieve_prompt, + ) + + +class AsyncResultsResourceWithRawResponse: + def __init__(self, results: AsyncResultsResource) -> None: + self._results = results + + self.retrieve = async_to_raw_response_wrapper( + results.retrieve, + ) + self.retrieve_prompt = async_to_raw_response_wrapper( + results.retrieve_prompt, + ) + + +class ResultsResourceWithStreamingResponse: + def __init__(self, results: ResultsResource) -> None: + self._results = results + + self.retrieve = to_streamed_response_wrapper( + results.retrieve, + ) + self.retrieve_prompt = to_streamed_response_wrapper( + results.retrieve_prompt, + ) + + +class AsyncResultsResourceWithStreamingResponse: + def __init__(self, results: AsyncResultsResource) -> None: + self._results = results + + self.retrieve = async_to_streamed_response_wrapper( + results.retrieve, + ) + self.retrieve_prompt = async_to_streamed_response_wrapper( + results.retrieve_prompt, + ) diff --git
a/src/gradientai/resources/regions/evaluation_test_cases.py b/src/gradientai/resources/regions/evaluation_test_cases.py new file mode 100644 index 00000000..eed4d8b4 --- /dev/null +++ b/src/gradientai/resources/regions/evaluation_test_cases.py @@ -0,0 +1,618 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List + +import httpx + +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..._base_client import make_request_options +from ...types.regions import ( + evaluation_test_case_create_params, + evaluation_test_case_update_params, + evaluation_test_case_list_evaluation_runs_params, +) +from ...types.regions.api_star_metric_param import APIStarMetricParam +from ...types.regions.evaluation_test_case_list_response import EvaluationTestCaseListResponse +from ...types.regions.evaluation_test_case_create_response import EvaluationTestCaseCreateResponse +from ...types.regions.evaluation_test_case_update_response import EvaluationTestCaseUpdateResponse +from ...types.regions.evaluation_test_case_retrieve_response import EvaluationTestCaseRetrieveResponse +from ...types.regions.evaluation_test_case_list_evaluation_runs_response import ( + EvaluationTestCaseListEvaluationRunsResponse, +) + +__all__ = ["EvaluationTestCasesResource", "AsyncEvaluationTestCasesResource"] + + +class EvaluationTestCasesResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> EvaluationTestCasesResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + """ + return EvaluationTestCasesResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> EvaluationTestCasesResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + """ + return EvaluationTestCasesResourceWithStreamingResponse(self) + + def create( + self, + *, + dataset_uuid: str | NotGiven = NOT_GIVEN, + description: str | NotGiven = NOT_GIVEN, + metrics: List[str] | NotGiven = NOT_GIVEN, + name: str | NotGiven = NOT_GIVEN, + star_metric: APIStarMetricParam | NotGiven = NOT_GIVEN, + workspace_uuid: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> EvaluationTestCaseCreateResponse: + """ + To create an evaluation test-case send a POST request to + `/v2/gen-ai/evaluation_test_cases`. + + Args: + dataset_uuid: Dataset against which the test‑case is executed. + + description: Description of the test case. 
+ + metrics: Full metric list to use for evaluation test case. + + name: Name of the test case. + + workspace_uuid: The workspace uuid. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/v2/gen-ai/evaluation_test_cases" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/evaluation_test_cases", + body=maybe_transform( + { + "dataset_uuid": dataset_uuid, + "description": description, + "metrics": metrics, + "name": name, + "star_metric": star_metric, + "workspace_uuid": workspace_uuid, + }, + evaluation_test_case_create_params.EvaluationTestCaseCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EvaluationTestCaseCreateResponse, + ) + + def retrieve( + self, + test_case_uuid: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> EvaluationTestCaseRetrieveResponse: + """ + To retrieve information about an existing evaluation test case, send a GET + request to `/v2/gen-ai/evaluation_test_cases/{test_case_uuid}`. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not test_case_uuid: + raise ValueError(f"Expected a non-empty value for `test_case_uuid` but received {test_case_uuid!r}") + return self._get( + f"/v2/gen-ai/evaluation_test_cases/{test_case_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/evaluation_test_cases/{test_case_uuid}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EvaluationTestCaseRetrieveResponse, + ) + + def update( + self, + path_test_case_uuid: str, + *, + dataset_uuid: str | NotGiven = NOT_GIVEN, + description: str | NotGiven = NOT_GIVEN, + metrics: evaluation_test_case_update_params.Metrics | NotGiven = NOT_GIVEN, + name: str | NotGiven = NOT_GIVEN, + star_metric: APIStarMetricParam | NotGiven = NOT_GIVEN, + body_test_case_uuid: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> EvaluationTestCaseUpdateResponse: + """ + To update an evaluation test-case, send a POST request to + `/v2/gen-ai/evaluation_test_cases/{test_case_uuid}`. + + Args: + dataset_uuid: Dataset against which the test‑case is executed. + + description: Description of the test case. + + name: Name of the test case.
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not path_test_case_uuid: + raise ValueError( + f"Expected a non-empty value for `path_test_case_uuid` but received {path_test_case_uuid!r}" + ) + return self._post( + f"/v2/gen-ai/evaluation_test_cases/{path_test_case_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/evaluation_test_cases/{path_test_case_uuid}", + body=maybe_transform( + { + "dataset_uuid": dataset_uuid, + "description": description, + "metrics": metrics, + "name": name, + "star_metric": star_metric, + "body_test_case_uuid": body_test_case_uuid, + }, + evaluation_test_case_update_params.EvaluationTestCaseUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EvaluationTestCaseUpdateResponse, + ) + + def list( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> EvaluationTestCaseListResponse: + """ + To list all evaluation test cases, send a GET request to + `/v2/gen-ai/evaluation_test_cases`. + """ + return self._get( + "/v2/gen-ai/evaluation_test_cases" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/evaluation_test_cases", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EvaluationTestCaseListResponse, + ) + + def list_evaluation_runs( + self, + evaluation_test_case_uuid: str, + *, + evaluation_test_case_version: int | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> EvaluationTestCaseListEvaluationRunsResponse: + """ + To list all evaluation runs by test case, send a GET request to + `/v2/gen-ai/evaluation_test_cases/{evaluation_test_case_uuid}/evaluation_runs`. + + Args: + evaluation_test_case_version: Version of the test case. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not evaluation_test_case_uuid: + raise ValueError( + f"Expected a non-empty value for `evaluation_test_case_uuid` but received {evaluation_test_case_uuid!r}" + ) + return self._get( + f"/v2/gen-ai/evaluation_test_cases/{evaluation_test_case_uuid}/evaluation_runs" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/evaluation_test_cases/{evaluation_test_case_uuid}/evaluation_runs", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + {"evaluation_test_case_version": evaluation_test_case_version}, + evaluation_test_case_list_evaluation_runs_params.EvaluationTestCaseListEvaluationRunsParams, + ), + ), + cast_to=EvaluationTestCaseListEvaluationRunsResponse, + ) + + +class AsyncEvaluationTestCasesResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncEvaluationTestCasesResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + """ + return AsyncEvaluationTestCasesResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncEvaluationTestCasesResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + """ + return AsyncEvaluationTestCasesResourceWithStreamingResponse(self) + + async def create( + self, + *, + dataset_uuid: str | NotGiven = NOT_GIVEN, + description: str | NotGiven = NOT_GIVEN, + metrics: List[str] | NotGiven = NOT_GIVEN, + name: str | NotGiven = NOT_GIVEN, + star_metric: APIStarMetricParam | NotGiven = NOT_GIVEN, + workspace_uuid: str | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> EvaluationTestCaseCreateResponse: + """ + To create an evaluation test-case send a POST request to + `/v2/gen-ai/evaluation_test_cases`. + + Args: + dataset_uuid: Dataset against which the test‑case is executed. + + description: Description of the test case. + + metrics: Full metric list to use for evaluation test case. + + name: Name of the test case. + + workspace_uuid: The workspace uuid. 
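The async class mirrors the sync resource method-for-method; a short sketch, assuming an `AsyncGradientAI` client exists alongside the sync one (the client classes themselves are not part of this diff).

    import asyncio

    from gradientai import AsyncGradientAI  # assumed async client class name

    async def main() -> None:
        client = AsyncGradientAI(api_key="YOUR_API_KEY")
        created = await client.regions.evaluation_test_cases.create(
            name="rag-regression-suite",
            workspace_uuid="workspace-uuid",  # placeholder value
        )
        print(created)

    asyncio.run(main())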
+
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        return await self._post(
+            "/v2/gen-ai/evaluation_test_cases"
+            if self._client._base_url_overridden
+            else "https://api.digitalocean.com/v2/gen-ai/evaluation_test_cases",
+            body=await async_maybe_transform(
+                {
+                    "dataset_uuid": dataset_uuid,
+                    "description": description,
+                    "metrics": metrics,
+                    "name": name,
+                    "star_metric": star_metric,
+                    "workspace_uuid": workspace_uuid,
+                },
+                evaluation_test_case_create_params.EvaluationTestCaseCreateParams,
+            ),
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=EvaluationTestCaseCreateResponse,
+        )
+
+    async def retrieve(
+        self,
+        test_case_uuid: str,
+        *,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+    ) -> EvaluationTestCaseRetrieveResponse:
+        """
+        To retrieve information about an existing evaluation test case, send a GET
+        request to `/v2/gen-ai/evaluation_test_cases/{test_case_uuid}`.
+
+        Args:
+          extra_headers: Send extra headers
+
+          extra_query: Add additional query parameters to the request
+
+          extra_body: Add additional JSON properties to the request
+
+          timeout: Override the client-level default timeout for this request, in seconds
+        """
+        if not test_case_uuid:
+            raise ValueError(f"Expected a non-empty value for `test_case_uuid` but received {test_case_uuid!r}")
+        return await self._get(
+            f"/v2/gen-ai/evaluation_test_cases/{test_case_uuid}"
+            if self._client._base_url_overridden
+            else f"https://api.digitalocean.com/v2/gen-ai/evaluation_test_cases/{test_case_uuid}",
+            options=make_request_options(
+                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+            ),
+            cast_to=EvaluationTestCaseRetrieveResponse,
+        )
+
+    async def update(
+        self,
+        path_test_case_uuid: str,
+        *,
+        dataset_uuid: str | NotGiven = NOT_GIVEN,
+        description: str | NotGiven = NOT_GIVEN,
+        metrics: evaluation_test_case_update_params.Metrics | NotGiven = NOT_GIVEN,
+        name: str | NotGiven = NOT_GIVEN,
+        star_metric: APIStarMetricParam | NotGiven = NOT_GIVEN,
+        body_test_case_uuid: str | NotGiven = NOT_GIVEN,
+        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+        # The extra values given here take precedence over values defined on the client or passed to this method.
+        extra_headers: Headers | None = None,
+        extra_query: Query | None = None,
+        extra_body: Body | None = None,
+        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+    ) -> EvaluationTestCaseUpdateResponse:
+        """
+        To update an evaluation test-case, send a POST request to
+        `/v2/gen-ai/evaluation_test_cases/{test_case_uuid}`.
+
+        Args:
+          dataset_uuid: Dataset against which the test-case is executed.
+
+          description: Description of the test case.
+
+          name: Name of the test case.
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not path_test_case_uuid: + raise ValueError( + f"Expected a non-empty value for `path_test_case_uuid` but received {path_test_case_uuid!r}" + ) + return await self._post( + f"/v2/gen-ai/evaluation_test_cases/{path_test_case_uuid}" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/evaluation_test_cases/{path_test_case_uuid}", + body=await async_maybe_transform( + { + "dataset_uuid": dataset_uuid, + "description": description, + "metrics": metrics, + "name": name, + "star_metric": star_metric, + "body_test_case_uuid": body_test_case_uuid, + }, + evaluation_test_case_update_params.EvaluationTestCaseUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EvaluationTestCaseUpdateResponse, + ) + + async def list( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> EvaluationTestCaseListResponse: + """ + To list all evaluation test cases, send a GET request to + `/v2/gen-ai/evaluation_test_cases`. + """ + return await self._get( + "/v2/gen-ai/evaluation_test_cases" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/evaluation_test_cases", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EvaluationTestCaseListResponse, + ) + + async def list_evaluation_runs( + self, + evaluation_test_case_uuid: str, + *, + evaluation_test_case_version: int | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> EvaluationTestCaseListEvaluationRunsResponse: + """ + To list all evaluation runs by test case, send a GET request to + `/v2/gen-ai/evaluation_test_cases/{evaluation_test_case_uuid}/evaluation_runs`. + + Args: + evaluation_test_case_version: Version of the test case. 
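Because every method on the async resource returns an awaitable, reads can be fanned out concurrently; a sketch using only calls defined in this file, with the client passed in to keep it self-contained.

    import asyncio

    async def fetch_many(client, test_case_uuids: list[str]):
        # Issue the retrieve calls concurrently instead of one at a time.
        return await asyncio.gather(
            *(client.regions.evaluation_test_cases.retrieve(uuid) for uuid in test_case_uuids)
        )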
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not evaluation_test_case_uuid: + raise ValueError( + f"Expected a non-empty value for `evaluation_test_case_uuid` but received {evaluation_test_case_uuid!r}" + ) + return await self._get( + f"/v2/gen-ai/evaluation_test_cases/{evaluation_test_case_uuid}/evaluation_runs" + if self._client._base_url_overridden + else f"https://api.digitalocean.com/v2/gen-ai/evaluation_test_cases/{evaluation_test_case_uuid}/evaluation_runs", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + {"evaluation_test_case_version": evaluation_test_case_version}, + evaluation_test_case_list_evaluation_runs_params.EvaluationTestCaseListEvaluationRunsParams, + ), + ), + cast_to=EvaluationTestCaseListEvaluationRunsResponse, + ) + + +class EvaluationTestCasesResourceWithRawResponse: + def __init__(self, evaluation_test_cases: EvaluationTestCasesResource) -> None: + self._evaluation_test_cases = evaluation_test_cases + + self.create = to_raw_response_wrapper( + evaluation_test_cases.create, + ) + self.retrieve = to_raw_response_wrapper( + evaluation_test_cases.retrieve, + ) + self.update = to_raw_response_wrapper( + evaluation_test_cases.update, + ) + self.list = to_raw_response_wrapper( + evaluation_test_cases.list, + ) + self.list_evaluation_runs = to_raw_response_wrapper( + evaluation_test_cases.list_evaluation_runs, + ) + + +class AsyncEvaluationTestCasesResourceWithRawResponse: + def __init__(self, evaluation_test_cases: AsyncEvaluationTestCasesResource) -> None: + self._evaluation_test_cases = evaluation_test_cases + + self.create = async_to_raw_response_wrapper( + evaluation_test_cases.create, + ) + self.retrieve = async_to_raw_response_wrapper( + evaluation_test_cases.retrieve, + ) + self.update = async_to_raw_response_wrapper( + evaluation_test_cases.update, + ) + self.list = async_to_raw_response_wrapper( + evaluation_test_cases.list, + ) + self.list_evaluation_runs = async_to_raw_response_wrapper( + evaluation_test_cases.list_evaluation_runs, + ) + + +class EvaluationTestCasesResourceWithStreamingResponse: + def __init__(self, evaluation_test_cases: EvaluationTestCasesResource) -> None: + self._evaluation_test_cases = evaluation_test_cases + + self.create = to_streamed_response_wrapper( + evaluation_test_cases.create, + ) + self.retrieve = to_streamed_response_wrapper( + evaluation_test_cases.retrieve, + ) + self.update = to_streamed_response_wrapper( + evaluation_test_cases.update, + ) + self.list = to_streamed_response_wrapper( + evaluation_test_cases.list, + ) + self.list_evaluation_runs = to_streamed_response_wrapper( + evaluation_test_cases.list_evaluation_runs, + ) + + +class AsyncEvaluationTestCasesResourceWithStreamingResponse: + def __init__(self, evaluation_test_cases: AsyncEvaluationTestCasesResource) -> None: + self._evaluation_test_cases = evaluation_test_cases + + self.create = async_to_streamed_response_wrapper( + evaluation_test_cases.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + evaluation_test_cases.retrieve, + ) + self.update = async_to_streamed_response_wrapper( + evaluation_test_cases.update, + ) + self.list = async_to_streamed_response_wrapper( + evaluation_test_cases.list, + ) + 
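These generated wrapper classes are what back the `.with_raw_response` and `.with_streaming_response` accessors referenced in the README links above. The sketch below follows the usual Stainless convention (headers on the raw object, `.parse()` to recover the typed model, a context manager for the streaming variant); those accessor names are a convention this diff does not itself define, and the `client` is the assumed one from the earlier asides.

    raw = client.regions.evaluation_test_cases.with_raw_response.list()
    print(raw.headers)      # inspect response headers
    listing = raw.parse()   # same EvaluationTestCaseListResponse as the plain call

    with client.regions.evaluation_test_cases.with_streaming_response.list() as streamed:
        listing = streamed.parse()  # body is only read here, inside the context manager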
self.list_evaluation_runs = async_to_streamed_response_wrapper( + evaluation_test_cases.list_evaluation_runs, + ) diff --git a/src/gradientai/resources/regions/regions.py b/src/gradientai/resources/regions/regions.py new file mode 100644 index 00000000..5f74b2e8 --- /dev/null +++ b/src/gradientai/resources/regions/regions.py @@ -0,0 +1,352 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import httpx + +from ...types import region_list_params +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..._base_client import make_request_options +from .evaluation_datasets import ( + EvaluationDatasetsResource, + AsyncEvaluationDatasetsResource, + EvaluationDatasetsResourceWithRawResponse, + AsyncEvaluationDatasetsResourceWithRawResponse, + EvaluationDatasetsResourceWithStreamingResponse, + AsyncEvaluationDatasetsResourceWithStreamingResponse, +) +from .evaluation_test_cases import ( + EvaluationTestCasesResource, + AsyncEvaluationTestCasesResource, + EvaluationTestCasesResourceWithRawResponse, + AsyncEvaluationTestCasesResourceWithRawResponse, + EvaluationTestCasesResourceWithStreamingResponse, + AsyncEvaluationTestCasesResourceWithStreamingResponse, +) +from ...types.region_list_response import RegionListResponse +from .evaluation_runs.evaluation_runs import ( + EvaluationRunsResource, + AsyncEvaluationRunsResource, + EvaluationRunsResourceWithRawResponse, + AsyncEvaluationRunsResourceWithRawResponse, + EvaluationRunsResourceWithStreamingResponse, + AsyncEvaluationRunsResourceWithStreamingResponse, +) +from ...types.region_list_evaluation_metrics_response import RegionListEvaluationMetricsResponse + +__all__ = ["RegionsResource", "AsyncRegionsResource"] + + +class RegionsResource(SyncAPIResource): + @cached_property + def evaluation_runs(self) -> EvaluationRunsResource: + return EvaluationRunsResource(self._client) + + @cached_property + def evaluation_test_cases(self) -> EvaluationTestCasesResource: + return EvaluationTestCasesResource(self._client) + + @cached_property + def evaluation_datasets(self) -> EvaluationDatasetsResource: + return EvaluationDatasetsResource(self._client) + + @cached_property + def with_raw_response(self) -> RegionsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + """ + return RegionsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> RegionsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + """ + return RegionsResourceWithStreamingResponse(self) + + def list( + self, + *, + serves_batch: bool | NotGiven = NOT_GIVEN, + serves_inference: bool | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
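Every request helper in this new file repeats the same pattern: use the relative path when `_base_url_overridden` is set, otherwise fall back to the hard-coded `https://api.digitalocean.com` prefix. For callers that means the base URL is swappable at construction time; a sketch reusing the assumed `GradientAI` class from the earlier asides and assuming a `base_url` keyword, which this diff does not show.

    # Default: requests resolve against https://api.digitalocean.com/v2/gen-ai/...
    client = GradientAI(api_key="YOUR_API_KEY")

    # Overridden: the same relative paths ("/v2/gen-ai/regions", ...) are resolved
    # against the custom base URL, e.g. a proxy or a test server.
    proxied = GradientAI(api_key="YOUR_API_KEY", base_url="https://example.test/do-proxy")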
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> RegionListResponse: + """ + To list all datacenter regions, send a GET request to `/v2/gen-ai/regions`. + + Args: + serves_batch: include datacenters that are capable of running batch jobs. + + serves_inference: include datacenters that serve inference. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get( + "/v2/gen-ai/regions" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/regions", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "serves_batch": serves_batch, + "serves_inference": serves_inference, + }, + region_list_params.RegionListParams, + ), + ), + cast_to=RegionListResponse, + ) + + def list_evaluation_metrics( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> RegionListEvaluationMetricsResponse: + """ + To list all evaluation metrics, send a GET request to + `/v2/gen-ai/evaluation_metrics`. + """ + return self._get( + "/v2/gen-ai/evaluation_metrics" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/evaluation_metrics", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=RegionListEvaluationMetricsResponse, + ) + + +class AsyncRegionsResource(AsyncAPIResource): + @cached_property + def evaluation_runs(self) -> AsyncEvaluationRunsResource: + return AsyncEvaluationRunsResource(self._client) + + @cached_property + def evaluation_test_cases(self) -> AsyncEvaluationTestCasesResource: + return AsyncEvaluationTestCasesResource(self._client) + + @cached_property + def evaluation_datasets(self) -> AsyncEvaluationDatasetsResource: + return AsyncEvaluationDatasetsResource(self._client) + + @cached_property + def with_raw_response(self) -> AsyncRegionsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + """ + return AsyncRegionsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncRegionsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
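A sketch of the two region-level calls defined above, again with the assumed client passed in; both boolean filters on `list` are optional.

    def inference_regions_and_metrics(client):
        # Only datacenters capable of serving inference traffic.
        regions = client.regions.list(serves_inference=True)
        # The evaluation-metric catalogue hangs off the same resource.
        metrics = client.regions.list_evaluation_metrics()
        return regions, metrics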
+ + For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + """ + return AsyncRegionsResourceWithStreamingResponse(self) + + async def list( + self, + *, + serves_batch: bool | NotGiven = NOT_GIVEN, + serves_inference: bool | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> RegionListResponse: + """ + To list all datacenter regions, send a GET request to `/v2/gen-ai/regions`. + + Args: + serves_batch: include datacenters that are capable of running batch jobs. + + serves_inference: include datacenters that serve inference. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._get( + "/v2/gen-ai/regions" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/regions", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "serves_batch": serves_batch, + "serves_inference": serves_inference, + }, + region_list_params.RegionListParams, + ), + ), + cast_to=RegionListResponse, + ) + + async def list_evaluation_metrics( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> RegionListEvaluationMetricsResponse: + """ + To list all evaluation metrics, send a GET request to + `/v2/gen-ai/evaluation_metrics`. 
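All of these generated methods, sync and async, accept the same per-request overrides (`extra_headers`, `extra_query`, `extra_body`, `timeout`); a sketch of passing them through on a single call, with the assumed client passed in.

    import httpx

    def list_batch_regions_with_overrides(client):
        return client.regions.list(
            serves_batch=True,
            extra_headers={"X-Request-Id": "debug-123"},  # merged into the outgoing request
            timeout=httpx.Timeout(30.0),                  # overrides the client-level default
        )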
+ """ + return await self._get( + "/v2/gen-ai/evaluation_metrics" + if self._client._base_url_overridden + else "https://api.digitalocean.com/v2/gen-ai/evaluation_metrics", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=RegionListEvaluationMetricsResponse, + ) + + +class RegionsResourceWithRawResponse: + def __init__(self, regions: RegionsResource) -> None: + self._regions = regions + + self.list = to_raw_response_wrapper( + regions.list, + ) + self.list_evaluation_metrics = to_raw_response_wrapper( + regions.list_evaluation_metrics, + ) + + @cached_property + def evaluation_runs(self) -> EvaluationRunsResourceWithRawResponse: + return EvaluationRunsResourceWithRawResponse(self._regions.evaluation_runs) + + @cached_property + def evaluation_test_cases(self) -> EvaluationTestCasesResourceWithRawResponse: + return EvaluationTestCasesResourceWithRawResponse(self._regions.evaluation_test_cases) + + @cached_property + def evaluation_datasets(self) -> EvaluationDatasetsResourceWithRawResponse: + return EvaluationDatasetsResourceWithRawResponse(self._regions.evaluation_datasets) + + +class AsyncRegionsResourceWithRawResponse: + def __init__(self, regions: AsyncRegionsResource) -> None: + self._regions = regions + + self.list = async_to_raw_response_wrapper( + regions.list, + ) + self.list_evaluation_metrics = async_to_raw_response_wrapper( + regions.list_evaluation_metrics, + ) + + @cached_property + def evaluation_runs(self) -> AsyncEvaluationRunsResourceWithRawResponse: + return AsyncEvaluationRunsResourceWithRawResponse(self._regions.evaluation_runs) + + @cached_property + def evaluation_test_cases(self) -> AsyncEvaluationTestCasesResourceWithRawResponse: + return AsyncEvaluationTestCasesResourceWithRawResponse(self._regions.evaluation_test_cases) + + @cached_property + def evaluation_datasets(self) -> AsyncEvaluationDatasetsResourceWithRawResponse: + return AsyncEvaluationDatasetsResourceWithRawResponse(self._regions.evaluation_datasets) + + +class RegionsResourceWithStreamingResponse: + def __init__(self, regions: RegionsResource) -> None: + self._regions = regions + + self.list = to_streamed_response_wrapper( + regions.list, + ) + self.list_evaluation_metrics = to_streamed_response_wrapper( + regions.list_evaluation_metrics, + ) + + @cached_property + def evaluation_runs(self) -> EvaluationRunsResourceWithStreamingResponse: + return EvaluationRunsResourceWithStreamingResponse(self._regions.evaluation_runs) + + @cached_property + def evaluation_test_cases(self) -> EvaluationTestCasesResourceWithStreamingResponse: + return EvaluationTestCasesResourceWithStreamingResponse(self._regions.evaluation_test_cases) + + @cached_property + def evaluation_datasets(self) -> EvaluationDatasetsResourceWithStreamingResponse: + return EvaluationDatasetsResourceWithStreamingResponse(self._regions.evaluation_datasets) + + +class AsyncRegionsResourceWithStreamingResponse: + def __init__(self, regions: AsyncRegionsResource) -> None: + self._regions = regions + + self.list = async_to_streamed_response_wrapper( + regions.list, + ) + self.list_evaluation_metrics = async_to_streamed_response_wrapper( + regions.list_evaluation_metrics, + ) + + @cached_property + def evaluation_runs(self) -> AsyncEvaluationRunsResourceWithStreamingResponse: + return AsyncEvaluationRunsResourceWithStreamingResponse(self._regions.evaluation_runs) + + @cached_property + def evaluation_test_cases(self) -> 
AsyncEvaluationTestCasesResourceWithStreamingResponse: + return AsyncEvaluationTestCasesResourceWithStreamingResponse(self._regions.evaluation_test_cases) + + @cached_property + def evaluation_datasets(self) -> AsyncEvaluationDatasetsResourceWithStreamingResponse: + return AsyncEvaluationDatasetsResourceWithStreamingResponse(self._regions.evaluation_datasets) diff --git a/src/digitalocean_genai_sdk/types/__init__.py b/src/gradientai/types/__init__.py similarity index 80% rename from src/digitalocean_genai_sdk/types/__init__.py rename to src/gradientai/types/__init__.py index ee516f83..d09aaa2a 100644 --- a/src/digitalocean_genai_sdk/types/__init__.py +++ b/src/gradientai/types/__init__.py @@ -2,33 +2,32 @@ from __future__ import annotations -from .model import Model as Model from .api_agent import APIAgent as APIAgent from .api_model import APIModel as APIModel from .api_agreement import APIAgreement as APIAgreement +from .api_workspace import APIWorkspace as APIWorkspace +from .api_agent_model import APIAgentModel as APIAgentModel from .api_indexing_job import APIIndexingJob as APIIndexingJob from .agent_list_params import AgentListParams as AgentListParams from .api_model_version import APIModelVersion as APIModelVersion +from .model_list_params import ModelListParams as ModelListParams from .api_knowledge_base import APIKnowledgeBase as APIKnowledgeBase from .region_list_params import RegionListParams as RegionListParams from .agent_create_params import AgentCreateParams as AgentCreateParams from .agent_list_response import AgentListResponse as AgentListResponse from .agent_update_params import AgentUpdateParams as AgentUpdateParams -from .api_key_list_params import APIKeyListParams as APIKeyListParams from .model_list_response import ModelListResponse as ModelListResponse from .api_retrieval_method import APIRetrievalMethod as APIRetrievalMethod from .region_list_response import RegionListResponse as RegionListResponse from .agent_create_response import AgentCreateResponse as AgentCreateResponse from .agent_delete_response import AgentDeleteResponse as AgentDeleteResponse from .agent_update_response import AgentUpdateResponse as AgentUpdateResponse -from .api_key_list_response import APIKeyListResponse as APIKeyListResponse +from .api_evaluation_metric import APIEvaluationMetric as APIEvaluationMetric from .api_agent_api_key_info import APIAgentAPIKeyInfo as APIAgentAPIKeyInfo from .agent_retrieve_response import AgentRetrieveResponse as AgentRetrieveResponse from .api_openai_api_key_info import APIOpenAIAPIKeyInfo as APIOpenAIAPIKeyInfo -from .embedding_create_params import EmbeddingCreateParams as EmbeddingCreateParams from .indexing_job_list_params import IndexingJobListParams as IndexingJobListParams from .api_deployment_visibility import APIDeploymentVisibility as APIDeploymentVisibility -from .embedding_create_response import EmbeddingCreateResponse as EmbeddingCreateResponse from .agent_update_status_params import AgentUpdateStatusParams as AgentUpdateStatusParams from .api_anthropic_api_key_info import APIAnthropicAPIKeyInfo as APIAnthropicAPIKeyInfo from .indexing_job_create_params import IndexingJobCreateParams as IndexingJobCreateParams @@ -39,19 +38,16 @@ from .knowledge_base_create_params import KnowledgeBaseCreateParams as KnowledgeBaseCreateParams from .knowledge_base_list_response import KnowledgeBaseListResponse as KnowledgeBaseListResponse from .knowledge_base_update_params import KnowledgeBaseUpdateParams as KnowledgeBaseUpdateParams -from 
.chat_completion_token_logprob import ChatCompletionTokenLogprob as ChatCompletionTokenLogprob -from .chat_create_completion_params import ChatCreateCompletionParams as ChatCreateCompletionParams from .indexing_job_retrieve_response import IndexingJobRetrieveResponse as IndexingJobRetrieveResponse from .knowledge_base_create_response import KnowledgeBaseCreateResponse as KnowledgeBaseCreateResponse from .knowledge_base_delete_response import KnowledgeBaseDeleteResponse as KnowledgeBaseDeleteResponse from .knowledge_base_update_response import KnowledgeBaseUpdateResponse as KnowledgeBaseUpdateResponse -from .chat_create_completion_response import ChatCreateCompletionResponse as ChatCreateCompletionResponse from .knowledge_base_retrieve_response import KnowledgeBaseRetrieveResponse as KnowledgeBaseRetrieveResponse from .indexing_job_update_cancel_params import IndexingJobUpdateCancelParams as IndexingJobUpdateCancelParams from .indexing_job_update_cancel_response import IndexingJobUpdateCancelResponse as IndexingJobUpdateCancelResponse +from .region_list_evaluation_metrics_response import ( + RegionListEvaluationMetricsResponse as RegionListEvaluationMetricsResponse, +) from .indexing_job_retrieve_data_sources_response import ( IndexingJobRetrieveDataSourcesResponse as IndexingJobRetrieveDataSourcesResponse, ) -from .chat_completion_request_message_content_part_text_param import ( - ChatCompletionRequestMessageContentPartTextParam as ChatCompletionRequestMessageContentPartTextParam, -) diff --git a/src/digitalocean_genai_sdk/types/agent_create_params.py b/src/gradientai/types/agent_create_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agent_create_params.py rename to src/gradientai/types/agent_create_params.py diff --git a/src/digitalocean_genai_sdk/types/agent_create_response.py b/src/gradientai/types/agent_create_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agent_create_response.py rename to src/gradientai/types/agent_create_response.py diff --git a/src/digitalocean_genai_sdk/types/agent_delete_response.py b/src/gradientai/types/agent_delete_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agent_delete_response.py rename to src/gradientai/types/agent_delete_response.py diff --git a/src/digitalocean_genai_sdk/types/agent_list_params.py b/src/gradientai/types/agent_list_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agent_list_params.py rename to src/gradientai/types/agent_list_params.py diff --git a/src/digitalocean_genai_sdk/types/agent_list_response.py b/src/gradientai/types/agent_list_response.py similarity index 97% rename from src/digitalocean_genai_sdk/types/agent_list_response.py rename to src/gradientai/types/agent_list_response.py index 4cedbb39..97c0f0d5 100644 --- a/src/digitalocean_genai_sdk/types/agent_list_response.py +++ b/src/gradientai/types/agent_list_response.py @@ -5,8 +5,8 @@ from typing_extensions import Literal from .._models import BaseModel -from .api_model import APIModel from .agents.api_meta import APIMeta +from .api_agent_model import APIAgentModel from .agents.api_links import APILinks from .api_knowledge_base import APIKnowledgeBase from .api_retrieval_method import APIRetrievalMethod @@ -92,7 +92,7 @@ class AgentTemplate(BaseModel): max_tokens: Optional[int] = None - model: Optional[APIModel] = None + model: Optional[APIAgentModel] = None name: Optional[str] = None @@ -143,7 +143,7 @@ class Agent(BaseModel): response. 
""" - model: Optional[APIModel] = None + model: Optional[APIAgentModel] = None name: Optional[str] = None diff --git a/src/digitalocean_genai_sdk/types/agent_retrieve_response.py b/src/gradientai/types/agent_retrieve_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agent_retrieve_response.py rename to src/gradientai/types/agent_retrieve_response.py diff --git a/src/digitalocean_genai_sdk/types/agent_update_params.py b/src/gradientai/types/agent_update_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agent_update_params.py rename to src/gradientai/types/agent_update_params.py diff --git a/src/digitalocean_genai_sdk/types/agent_update_response.py b/src/gradientai/types/agent_update_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agent_update_response.py rename to src/gradientai/types/agent_update_response.py diff --git a/src/digitalocean_genai_sdk/types/agent_update_status_params.py b/src/gradientai/types/agent_update_status_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agent_update_status_params.py rename to src/gradientai/types/agent_update_status_params.py diff --git a/src/digitalocean_genai_sdk/types/agent_update_status_response.py b/src/gradientai/types/agent_update_status_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agent_update_status_response.py rename to src/gradientai/types/agent_update_status_response.py diff --git a/src/digitalocean_genai_sdk/types/agents/__init__.py b/src/gradientai/types/agents/__init__.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/__init__.py rename to src/gradientai/types/agents/__init__.py diff --git a/src/digitalocean_genai_sdk/types/agents/api_key_create_params.py b/src/gradientai/types/agents/api_key_create_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/api_key_create_params.py rename to src/gradientai/types/agents/api_key_create_params.py diff --git a/src/digitalocean_genai_sdk/types/agents/api_key_create_response.py b/src/gradientai/types/agents/api_key_create_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/api_key_create_response.py rename to src/gradientai/types/agents/api_key_create_response.py diff --git a/src/digitalocean_genai_sdk/types/agents/api_key_delete_response.py b/src/gradientai/types/agents/api_key_delete_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/api_key_delete_response.py rename to src/gradientai/types/agents/api_key_delete_response.py diff --git a/src/digitalocean_genai_sdk/types/agents/api_key_list_params.py b/src/gradientai/types/agents/api_key_list_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/api_key_list_params.py rename to src/gradientai/types/agents/api_key_list_params.py diff --git a/src/digitalocean_genai_sdk/types/agents/api_key_list_response.py b/src/gradientai/types/agents/api_key_list_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/api_key_list_response.py rename to src/gradientai/types/agents/api_key_list_response.py diff --git a/src/digitalocean_genai_sdk/types/agents/api_key_regenerate_response.py b/src/gradientai/types/agents/api_key_regenerate_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/api_key_regenerate_response.py rename to src/gradientai/types/agents/api_key_regenerate_response.py diff --git 
a/src/digitalocean_genai_sdk/types/agents/api_key_update_params.py b/src/gradientai/types/agents/api_key_update_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/api_key_update_params.py rename to src/gradientai/types/agents/api_key_update_params.py diff --git a/src/digitalocean_genai_sdk/types/agents/api_key_update_response.py b/src/gradientai/types/agents/api_key_update_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/api_key_update_response.py rename to src/gradientai/types/agents/api_key_update_response.py diff --git a/src/digitalocean_genai_sdk/types/agents/api_link_knowledge_base_output.py b/src/gradientai/types/agents/api_link_knowledge_base_output.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/api_link_knowledge_base_output.py rename to src/gradientai/types/agents/api_link_knowledge_base_output.py diff --git a/src/digitalocean_genai_sdk/types/agents/api_links.py b/src/gradientai/types/agents/api_links.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/api_links.py rename to src/gradientai/types/agents/api_links.py diff --git a/src/digitalocean_genai_sdk/types/agents/api_meta.py b/src/gradientai/types/agents/api_meta.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/api_meta.py rename to src/gradientai/types/agents/api_meta.py diff --git a/src/digitalocean_genai_sdk/types/agents/child_agent_add_params.py b/src/gradientai/types/agents/child_agent_add_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/child_agent_add_params.py rename to src/gradientai/types/agents/child_agent_add_params.py diff --git a/src/digitalocean_genai_sdk/types/agents/child_agent_add_response.py b/src/gradientai/types/agents/child_agent_add_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/child_agent_add_response.py rename to src/gradientai/types/agents/child_agent_add_response.py diff --git a/src/digitalocean_genai_sdk/types/agents/child_agent_delete_response.py b/src/gradientai/types/agents/child_agent_delete_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/child_agent_delete_response.py rename to src/gradientai/types/agents/child_agent_delete_response.py diff --git a/src/digitalocean_genai_sdk/types/agents/child_agent_update_params.py b/src/gradientai/types/agents/child_agent_update_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/child_agent_update_params.py rename to src/gradientai/types/agents/child_agent_update_params.py diff --git a/src/digitalocean_genai_sdk/types/agents/child_agent_update_response.py b/src/gradientai/types/agents/child_agent_update_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/child_agent_update_response.py rename to src/gradientai/types/agents/child_agent_update_response.py diff --git a/src/digitalocean_genai_sdk/types/agents/child_agent_view_response.py b/src/gradientai/types/agents/child_agent_view_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/child_agent_view_response.py rename to src/gradientai/types/agents/child_agent_view_response.py diff --git a/src/digitalocean_genai_sdk/types/agents/function_create_params.py b/src/gradientai/types/agents/function_create_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/function_create_params.py rename to 
src/gradientai/types/agents/function_create_params.py diff --git a/src/digitalocean_genai_sdk/types/agents/function_create_response.py b/src/gradientai/types/agents/function_create_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/function_create_response.py rename to src/gradientai/types/agents/function_create_response.py diff --git a/src/digitalocean_genai_sdk/types/agents/function_delete_response.py b/src/gradientai/types/agents/function_delete_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/function_delete_response.py rename to src/gradientai/types/agents/function_delete_response.py diff --git a/src/digitalocean_genai_sdk/types/agents/function_update_params.py b/src/gradientai/types/agents/function_update_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/function_update_params.py rename to src/gradientai/types/agents/function_update_params.py diff --git a/src/digitalocean_genai_sdk/types/agents/function_update_response.py b/src/gradientai/types/agents/function_update_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/function_update_response.py rename to src/gradientai/types/agents/function_update_response.py diff --git a/src/digitalocean_genai_sdk/types/agents/knowledge_base_detach_response.py b/src/gradientai/types/agents/knowledge_base_detach_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/knowledge_base_detach_response.py rename to src/gradientai/types/agents/knowledge_base_detach_response.py diff --git a/src/digitalocean_genai_sdk/types/agents/version_list_params.py b/src/gradientai/types/agents/version_list_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/version_list_params.py rename to src/gradientai/types/agents/version_list_params.py diff --git a/src/digitalocean_genai_sdk/types/agents/version_list_response.py b/src/gradientai/types/agents/version_list_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/version_list_response.py rename to src/gradientai/types/agents/version_list_response.py diff --git a/src/digitalocean_genai_sdk/types/agents/version_update_params.py b/src/gradientai/types/agents/version_update_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/version_update_params.py rename to src/gradientai/types/agents/version_update_params.py diff --git a/src/digitalocean_genai_sdk/types/agents/version_update_response.py b/src/gradientai/types/agents/version_update_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/agents/version_update_response.py rename to src/gradientai/types/agents/version_update_response.py diff --git a/src/digitalocean_genai_sdk/types/api_agent.py b/src/gradientai/types/api_agent.py similarity index 96% rename from src/digitalocean_genai_sdk/types/api_agent.py rename to src/gradientai/types/api_agent.py index d6e18ca2..1378950a 100644 --- a/src/digitalocean_genai_sdk/types/api_agent.py +++ b/src/gradientai/types/api_agent.py @@ -7,7 +7,7 @@ from typing_extensions import Literal from .._models import BaseModel -from .api_model import APIModel +from .api_agent_model import APIAgentModel from .api_knowledge_base import APIKnowledgeBase from .api_retrieval_method import APIRetrievalMethod from .api_agent_api_key_info import APIAgentAPIKeyInfo @@ -162,7 +162,7 @@ class Template(BaseModel): max_tokens: Optional[int] = None - model: Optional[APIModel] 
= None + model: Optional[APIAgentModel] = None name: Optional[str] = None @@ -222,7 +222,7 @@ class APIAgent(BaseModel): max_tokens: Optional[int] = None - model: Optional[APIModel] = None + model: Optional[APIAgentModel] = None name: Optional[str] = None @@ -261,3 +261,8 @@ class APIAgent(BaseModel): user_id: Optional[str] = None uuid: Optional[str] = None + + workspace: Optional["APIWorkspace"] = None + + +from .api_workspace import APIWorkspace diff --git a/src/digitalocean_genai_sdk/types/api_agent_api_key_info.py b/src/gradientai/types/api_agent_api_key_info.py similarity index 100% rename from src/digitalocean_genai_sdk/types/api_agent_api_key_info.py rename to src/gradientai/types/api_agent_api_key_info.py diff --git a/src/digitalocean_genai_sdk/types/api_model.py b/src/gradientai/types/api_agent_model.py similarity index 95% rename from src/digitalocean_genai_sdk/types/api_model.py rename to src/gradientai/types/api_agent_model.py index d680a638..1025321b 100644 --- a/src/digitalocean_genai_sdk/types/api_model.py +++ b/src/gradientai/types/api_agent_model.py @@ -8,10 +8,10 @@ from .api_agreement import APIAgreement from .api_model_version import APIModelVersion -__all__ = ["APIModel"] +__all__ = ["APIAgentModel"] -class APIModel(BaseModel): +class APIAgentModel(BaseModel): agreement: Optional[APIAgreement] = None created_at: Optional[datetime] = None diff --git a/src/digitalocean_genai_sdk/types/api_agreement.py b/src/gradientai/types/api_agreement.py similarity index 100% rename from src/digitalocean_genai_sdk/types/api_agreement.py rename to src/gradientai/types/api_agreement.py diff --git a/src/digitalocean_genai_sdk/types/api_anthropic_api_key_info.py b/src/gradientai/types/api_anthropic_api_key_info.py similarity index 100% rename from src/digitalocean_genai_sdk/types/api_anthropic_api_key_info.py rename to src/gradientai/types/api_anthropic_api_key_info.py diff --git a/src/digitalocean_genai_sdk/types/api_deployment_visibility.py b/src/gradientai/types/api_deployment_visibility.py similarity index 100% rename from src/digitalocean_genai_sdk/types/api_deployment_visibility.py rename to src/gradientai/types/api_deployment_visibility.py diff --git a/src/gradientai/types/api_evaluation_metric.py b/src/gradientai/types/api_evaluation_metric.py new file mode 100644 index 00000000..05390297 --- /dev/null +++ b/src/gradientai/types/api_evaluation_metric.py @@ -0,0 +1,24 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
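The `APIEvaluationMetric` model defined in the remainder of this new file is a `BaseModel` whose fields are all optional, which makes it easy to build fixtures in tests. A sketch with illustrative values only; the import path comes from the `types/__init__.py` re-export shown earlier in this diff.

    from gradientai.types import APIEvaluationMetric

    metric = APIEvaluationMetric(
        metric_name="faithfulness",
        metric_type="METRIC_TYPE_RAG_AND_TOOL",
        metric_value_type="METRIC_VALUE_TYPE_NUMBER",
        metric_uuid="metric-uuid",
    )
    assert metric.description is None  # every field defaults to None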
+ +from typing import Optional +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["APIEvaluationMetric"] + + +class APIEvaluationMetric(BaseModel): + description: Optional[str] = None + + metric_name: Optional[str] = None + + metric_type: Optional[ + Literal["METRIC_TYPE_UNSPECIFIED", "METRIC_TYPE_GENERAL_QUALITY", "METRIC_TYPE_RAG_AND_TOOL"] + ] = None + + metric_uuid: Optional[str] = None + + metric_value_type: Optional[ + Literal["METRIC_VALUE_TYPE_UNSPECIFIED", "METRIC_VALUE_TYPE_NUMBER", "METRIC_VALUE_TYPE_STRING"] + ] = None diff --git a/src/digitalocean_genai_sdk/types/api_indexing_job.py b/src/gradientai/types/api_indexing_job.py similarity index 100% rename from src/digitalocean_genai_sdk/types/api_indexing_job.py rename to src/gradientai/types/api_indexing_job.py diff --git a/src/digitalocean_genai_sdk/types/api_knowledge_base.py b/src/gradientai/types/api_knowledge_base.py similarity index 100% rename from src/digitalocean_genai_sdk/types/api_knowledge_base.py rename to src/gradientai/types/api_knowledge_base.py diff --git a/src/digitalocean_genai_sdk/types/api_key_list_response.py b/src/gradientai/types/api_model.py similarity index 65% rename from src/digitalocean_genai_sdk/types/api_key_list_response.py rename to src/gradientai/types/api_model.py index 360de7a4..c2bc1edd 100644 --- a/src/digitalocean_genai_sdk/types/api_key_list_response.py +++ b/src/gradientai/types/api_model.py @@ -1,18 +1,16 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. -from typing import List, Optional +from typing import Optional from datetime import datetime from .._models import BaseModel from .api_agreement import APIAgreement -from .agents.api_meta import APIMeta -from .agents.api_links import APILinks from .api_model_version import APIModelVersion -__all__ = ["APIKeyListResponse", "Model"] +__all__ = ["APIModel"] -class Model(BaseModel): +class APIModel(BaseModel): agreement: Optional[APIAgreement] = None created_at: Optional[datetime] = None @@ -32,11 +30,3 @@ class Model(BaseModel): uuid: Optional[str] = None version: Optional[APIModelVersion] = None - - -class APIKeyListResponse(BaseModel): - links: Optional[APILinks] = None - - meta: Optional[APIMeta] = None - - models: Optional[List[Model]] = None diff --git a/src/digitalocean_genai_sdk/types/api_model_version.py b/src/gradientai/types/api_model_version.py similarity index 100% rename from src/digitalocean_genai_sdk/types/api_model_version.py rename to src/gradientai/types/api_model_version.py diff --git a/src/digitalocean_genai_sdk/types/api_openai_api_key_info.py b/src/gradientai/types/api_openai_api_key_info.py similarity index 84% rename from src/digitalocean_genai_sdk/types/api_openai_api_key_info.py rename to src/gradientai/types/api_openai_api_key_info.py index 39328f80..7467cfc2 100644 --- a/src/digitalocean_genai_sdk/types/api_openai_api_key_info.py +++ b/src/gradientai/types/api_openai_api_key_info.py @@ -4,7 +4,7 @@ from datetime import datetime from .._models import BaseModel -from .api_model import APIModel +from .api_agent_model import APIAgentModel __all__ = ["APIOpenAIAPIKeyInfo"] @@ -16,7 +16,7 @@ class APIOpenAIAPIKeyInfo(BaseModel): deleted_at: Optional[datetime] = None - models: Optional[List[APIModel]] = None + models: Optional[List[APIAgentModel]] = None name: Optional[str] = None diff --git a/src/digitalocean_genai_sdk/types/api_retrieval_method.py b/src/gradientai/types/api_retrieval_method.py similarity index 100% rename from 
src/digitalocean_genai_sdk/types/api_retrieval_method.py rename to src/gradientai/types/api_retrieval_method.py diff --git a/src/gradientai/types/api_workspace.py b/src/gradientai/types/api_workspace.py new file mode 100644 index 00000000..b170d504 --- /dev/null +++ b/src/gradientai/types/api_workspace.py @@ -0,0 +1,36 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List, Optional +from datetime import datetime + +from .._models import BaseModel +from .regions.api_evaluation_test_case import APIEvaluationTestCase + +__all__ = ["APIWorkspace"] + + +class APIWorkspace(BaseModel): + agents: Optional[List["APIAgent"]] = None + + created_at: Optional[datetime] = None + + created_by: Optional[str] = None + + created_by_email: Optional[str] = None + + deleted_at: Optional[datetime] = None + + description: Optional[str] = None + + evaluation_test_cases: Optional[List[APIEvaluationTestCase]] = None + + name: Optional[str] = None + + updated_at: Optional[datetime] = None + + uuid: Optional[str] = None + + +from .api_agent import APIAgent diff --git a/src/gradientai/types/chat/__init__.py b/src/gradientai/types/chat/__init__.py new file mode 100644 index 00000000..59553f68 --- /dev/null +++ b/src/gradientai/types/chat/__init__.py @@ -0,0 +1,7 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .completion_create_params import CompletionCreateParams as CompletionCreateParams +from .completion_create_response import CompletionCreateResponse as CompletionCreateResponse +from .chat_completion_token_logprob import ChatCompletionTokenLogprob as ChatCompletionTokenLogprob diff --git a/src/digitalocean_genai_sdk/types/chat_completion_token_logprob.py b/src/gradientai/types/chat/chat_completion_token_logprob.py similarity index 98% rename from src/digitalocean_genai_sdk/types/chat_completion_token_logprob.py rename to src/gradientai/types/chat/chat_completion_token_logprob.py index 78de1dfa..c69e2589 100644 --- a/src/digitalocean_genai_sdk/types/chat_completion_token_logprob.py +++ b/src/gradientai/types/chat/chat_completion_token_logprob.py @@ -2,7 +2,7 @@ from typing import List, Optional -from .._models import BaseModel +from ..._models import BaseModel __all__ = ["ChatCompletionTokenLogprob", "TopLogprob"] diff --git a/src/digitalocean_genai_sdk/types/chat_create_completion_params.py b/src/gradientai/types/chat/completion_create_params.py similarity index 81% rename from src/digitalocean_genai_sdk/types/chat_create_completion_params.py rename to src/gradientai/types/chat/completion_create_params.py index 05c427b1..11d032ff 100644 --- a/src/digitalocean_genai_sdk/types/chat_create_completion_params.py +++ b/src/gradientai/types/chat/completion_create_params.py @@ -5,22 +5,18 @@ from typing import Dict, List, Union, Iterable, Optional from typing_extensions import Literal, Required, TypeAlias, TypedDict -from .chat_completion_request_message_content_part_text_param import ChatCompletionRequestMessageContentPartTextParam - __all__ = [ - "ChatCreateCompletionParams", + "CompletionCreateParams", "Message", "MessageChatCompletionRequestSystemMessage", "MessageChatCompletionRequestDeveloperMessage", "MessageChatCompletionRequestUserMessage", "MessageChatCompletionRequestAssistantMessage", - "MessageChatCompletionRequestAssistantMessageContentArrayOfContentPart", - 
"MessageChatCompletionRequestAssistantMessageContentArrayOfContentPartChatCompletionRequestMessageContentPartRefusal", "StreamOptions", ] -class ChatCreateCompletionParams(TypedDict, total=False): +class CompletionCreateParams(TypedDict, total=False): messages: Required[Iterable[Message]] """A list of messages comprising the conversation so far.""" @@ -137,7 +133,7 @@ class ChatCreateCompletionParams(TypedDict, total=False): class MessageChatCompletionRequestSystemMessage(TypedDict, total=False): - content: Required[Union[str, Iterable[ChatCompletionRequestMessageContentPartTextParam]]] + content: Required[Union[str, List[str]]] """The contents of the system message.""" role: Required[Literal["system"]] @@ -145,7 +141,7 @@ class MessageChatCompletionRequestSystemMessage(TypedDict, total=False): class MessageChatCompletionRequestDeveloperMessage(TypedDict, total=False): - content: Required[Union[str, Iterable[ChatCompletionRequestMessageContentPartTextParam]]] + content: Required[Union[str, List[str]]] """The contents of the developer message.""" role: Required[Literal["developer"]] @@ -153,39 +149,20 @@ class MessageChatCompletionRequestDeveloperMessage(TypedDict, total=False): class MessageChatCompletionRequestUserMessage(TypedDict, total=False): - content: Required[Union[str, Iterable[ChatCompletionRequestMessageContentPartTextParam]]] + content: Required[Union[str, List[str]]] """The contents of the user message.""" role: Required[Literal["user"]] """The role of the messages author, in this case `user`.""" -class MessageChatCompletionRequestAssistantMessageContentArrayOfContentPartChatCompletionRequestMessageContentPartRefusal( - TypedDict, total=False -): - refusal: Required[str] - """The refusal message generated by the model.""" - - type: Required[Literal["refusal"]] - """The type of the content part.""" - - -MessageChatCompletionRequestAssistantMessageContentArrayOfContentPart: TypeAlias = Union[ - ChatCompletionRequestMessageContentPartTextParam, - MessageChatCompletionRequestAssistantMessageContentArrayOfContentPartChatCompletionRequestMessageContentPartRefusal, -] - - class MessageChatCompletionRequestAssistantMessage(TypedDict, total=False): role: Required[Literal["assistant"]] """The role of the messages author, in this case `assistant`.""" - content: Union[str, Iterable[MessageChatCompletionRequestAssistantMessageContentArrayOfContentPart], None] + content: Union[str, List[str], None] """The contents of the assistant message.""" - refusal: Optional[str] - """The refusal message by the assistant.""" - Message: TypeAlias = Union[ MessageChatCompletionRequestSystemMessage, diff --git a/src/digitalocean_genai_sdk/types/chat_create_completion_response.py b/src/gradientai/types/chat/completion_create_response.py similarity index 92% rename from src/digitalocean_genai_sdk/types/chat_create_completion_response.py rename to src/gradientai/types/chat/completion_create_response.py index e1f20038..1ac59a28 100644 --- a/src/digitalocean_genai_sdk/types/chat_create_completion_response.py +++ b/src/gradientai/types/chat/completion_create_response.py @@ -3,10 +3,10 @@ from typing import List, Optional from typing_extensions import Literal -from .._models import BaseModel +from ..._models import BaseModel from .chat_completion_token_logprob import ChatCompletionTokenLogprob -__all__ = ["ChatCreateCompletionResponse", "Choice", "ChoiceLogprobs", "ChoiceMessage", "Usage"] +__all__ = ["CompletionCreateResponse", "Choice", "ChoiceLogprobs", "ChoiceMessage", "Usage"] class 
ChoiceLogprobs(BaseModel): @@ -58,7 +58,7 @@ class Usage(BaseModel): """Total number of tokens used in the request (prompt + completion).""" -class ChatCreateCompletionResponse(BaseModel): +class CompletionCreateResponse(BaseModel): id: str """A unique identifier for the chat completion.""" diff --git a/src/digitalocean_genai_sdk/types/indexing_job_create_params.py b/src/gradientai/types/indexing_job_create_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/indexing_job_create_params.py rename to src/gradientai/types/indexing_job_create_params.py diff --git a/src/digitalocean_genai_sdk/types/indexing_job_create_response.py b/src/gradientai/types/indexing_job_create_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/indexing_job_create_response.py rename to src/gradientai/types/indexing_job_create_response.py diff --git a/src/digitalocean_genai_sdk/types/indexing_job_list_params.py b/src/gradientai/types/indexing_job_list_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/indexing_job_list_params.py rename to src/gradientai/types/indexing_job_list_params.py diff --git a/src/digitalocean_genai_sdk/types/indexing_job_list_response.py b/src/gradientai/types/indexing_job_list_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/indexing_job_list_response.py rename to src/gradientai/types/indexing_job_list_response.py diff --git a/src/digitalocean_genai_sdk/types/indexing_job_retrieve_data_sources_response.py b/src/gradientai/types/indexing_job_retrieve_data_sources_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/indexing_job_retrieve_data_sources_response.py rename to src/gradientai/types/indexing_job_retrieve_data_sources_response.py diff --git a/src/digitalocean_genai_sdk/types/indexing_job_retrieve_response.py b/src/gradientai/types/indexing_job_retrieve_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/indexing_job_retrieve_response.py rename to src/gradientai/types/indexing_job_retrieve_response.py diff --git a/src/digitalocean_genai_sdk/types/indexing_job_update_cancel_params.py b/src/gradientai/types/indexing_job_update_cancel_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/indexing_job_update_cancel_params.py rename to src/gradientai/types/indexing_job_update_cancel_params.py diff --git a/src/digitalocean_genai_sdk/types/indexing_job_update_cancel_response.py b/src/gradientai/types/indexing_job_update_cancel_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/indexing_job_update_cancel_response.py rename to src/gradientai/types/indexing_job_update_cancel_response.py diff --git a/src/digitalocean_genai_sdk/types/api_keys/__init__.py b/src/gradientai/types/inference/__init__.py similarity index 88% rename from src/digitalocean_genai_sdk/types/api_keys/__init__.py rename to src/gradientai/types/inference/__init__.py index c3cbcd6d..829340d7 100644 --- a/src/digitalocean_genai_sdk/types/api_keys/__init__.py +++ b/src/gradientai/types/inference/__init__.py @@ -2,7 +2,9 @@ from __future__ import annotations +from .model import Model as Model from .api_key_list_params import APIKeyListParams as APIKeyListParams +from .model_list_response import ModelListResponse as ModelListResponse from .api_key_create_params import APIKeyCreateParams as APIKeyCreateParams from .api_key_list_response import APIKeyListResponse as APIKeyListResponse from .api_key_update_params import 
APIKeyUpdateParams as APIKeyUpdateParams diff --git a/src/digitalocean_genai_sdk/types/api_keys/api_key_create_params.py b/src/gradientai/types/inference/api_key_create_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/api_keys/api_key_create_params.py rename to src/gradientai/types/inference/api_key_create_params.py diff --git a/src/digitalocean_genai_sdk/types/api_keys/api_key_create_response.py b/src/gradientai/types/inference/api_key_create_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/api_keys/api_key_create_response.py rename to src/gradientai/types/inference/api_key_create_response.py diff --git a/src/digitalocean_genai_sdk/types/api_keys/api_key_delete_response.py b/src/gradientai/types/inference/api_key_delete_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/api_keys/api_key_delete_response.py rename to src/gradientai/types/inference/api_key_delete_response.py diff --git a/src/digitalocean_genai_sdk/types/api_keys/api_key_list_params.py b/src/gradientai/types/inference/api_key_list_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/api_keys/api_key_list_params.py rename to src/gradientai/types/inference/api_key_list_params.py diff --git a/src/digitalocean_genai_sdk/types/api_keys/api_key_list_response.py b/src/gradientai/types/inference/api_key_list_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/api_keys/api_key_list_response.py rename to src/gradientai/types/inference/api_key_list_response.py diff --git a/src/digitalocean_genai_sdk/types/api_keys/api_key_update_params.py b/src/gradientai/types/inference/api_key_update_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/api_keys/api_key_update_params.py rename to src/gradientai/types/inference/api_key_update_params.py diff --git a/src/digitalocean_genai_sdk/types/api_keys/api_key_update_regenerate_response.py b/src/gradientai/types/inference/api_key_update_regenerate_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/api_keys/api_key_update_regenerate_response.py rename to src/gradientai/types/inference/api_key_update_regenerate_response.py diff --git a/src/digitalocean_genai_sdk/types/api_keys/api_key_update_response.py b/src/gradientai/types/inference/api_key_update_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/api_keys/api_key_update_response.py rename to src/gradientai/types/inference/api_key_update_response.py diff --git a/src/digitalocean_genai_sdk/types/api_keys/api_model_api_key_info.py b/src/gradientai/types/inference/api_model_api_key_info.py similarity index 100% rename from src/digitalocean_genai_sdk/types/api_keys/api_model_api_key_info.py rename to src/gradientai/types/inference/api_model_api_key_info.py diff --git a/src/digitalocean_genai_sdk/types/model.py b/src/gradientai/types/inference/model.py similarity index 93% rename from src/digitalocean_genai_sdk/types/model.py rename to src/gradientai/types/inference/model.py index 2631ee8d..ed8843e8 100644 --- a/src/digitalocean_genai_sdk/types/model.py +++ b/src/gradientai/types/inference/model.py @@ -2,7 +2,7 @@ from typing_extensions import Literal -from .._models import BaseModel +from ..._models import BaseModel __all__ = ["Model"] diff --git a/src/digitalocean_genai_sdk/types/model_list_response.py b/src/gradientai/types/inference/model_list_response.py similarity index 90% rename from 
src/digitalocean_genai_sdk/types/model_list_response.py rename to src/gradientai/types/inference/model_list_response.py index 8f835449..01bf3b62 100644 --- a/src/digitalocean_genai_sdk/types/model_list_response.py +++ b/src/gradientai/types/inference/model_list_response.py @@ -4,7 +4,7 @@ from typing_extensions import Literal from .model import Model -from .._models import BaseModel +from ..._models import BaseModel __all__ = ["ModelListResponse"] diff --git a/src/digitalocean_genai_sdk/types/knowledge_base_create_params.py b/src/gradientai/types/knowledge_base_create_params.py similarity index 94% rename from src/digitalocean_genai_sdk/types/knowledge_base_create_params.py rename to src/gradientai/types/knowledge_base_create_params.py index 3a58166b..acf52e30 100644 --- a/src/digitalocean_genai_sdk/types/knowledge_base_create_params.py +++ b/src/gradientai/types/knowledge_base_create_params.py @@ -5,6 +5,7 @@ from typing import List, Iterable from typing_extensions import TypedDict +from .knowledge_bases.aws_data_source_param import AwsDataSourceParam from .knowledge_bases.api_spaces_data_source_param import APISpacesDataSourceParam from .knowledge_bases.api_file_upload_data_source_param import APIFileUploadDataSourceParam from .knowledge_bases.api_web_crawler_data_source_param import APIWebCrawlerDataSourceParam @@ -50,6 +51,8 @@ class KnowledgeBaseCreateParams(TypedDict, total=False): class Datasource(TypedDict, total=False): + aws_data_source: AwsDataSourceParam + bucket_name: str bucket_region: str diff --git a/src/digitalocean_genai_sdk/types/knowledge_base_create_response.py b/src/gradientai/types/knowledge_base_create_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/knowledge_base_create_response.py rename to src/gradientai/types/knowledge_base_create_response.py diff --git a/src/digitalocean_genai_sdk/types/knowledge_base_delete_response.py b/src/gradientai/types/knowledge_base_delete_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/knowledge_base_delete_response.py rename to src/gradientai/types/knowledge_base_delete_response.py diff --git a/src/digitalocean_genai_sdk/types/knowledge_base_list_params.py b/src/gradientai/types/knowledge_base_list_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/knowledge_base_list_params.py rename to src/gradientai/types/knowledge_base_list_params.py diff --git a/src/digitalocean_genai_sdk/types/knowledge_base_list_response.py b/src/gradientai/types/knowledge_base_list_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/knowledge_base_list_response.py rename to src/gradientai/types/knowledge_base_list_response.py diff --git a/src/digitalocean_genai_sdk/types/knowledge_base_retrieve_response.py b/src/gradientai/types/knowledge_base_retrieve_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/knowledge_base_retrieve_response.py rename to src/gradientai/types/knowledge_base_retrieve_response.py diff --git a/src/digitalocean_genai_sdk/types/knowledge_base_update_params.py b/src/gradientai/types/knowledge_base_update_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/knowledge_base_update_params.py rename to src/gradientai/types/knowledge_base_update_params.py diff --git a/src/digitalocean_genai_sdk/types/knowledge_base_update_response.py b/src/gradientai/types/knowledge_base_update_response.py similarity index 100% rename from 
src/digitalocean_genai_sdk/types/knowledge_base_update_response.py rename to src/gradientai/types/knowledge_base_update_response.py diff --git a/src/digitalocean_genai_sdk/types/knowledge_bases/__init__.py b/src/gradientai/types/knowledge_bases/__init__.py similarity index 94% rename from src/digitalocean_genai_sdk/types/knowledge_bases/__init__.py rename to src/gradientai/types/knowledge_bases/__init__.py index f5f31034..859c3618 100644 --- a/src/digitalocean_genai_sdk/types/knowledge_bases/__init__.py +++ b/src/gradientai/types/knowledge_bases/__init__.py @@ -2,6 +2,7 @@ from __future__ import annotations +from .aws_data_source_param import AwsDataSourceParam as AwsDataSourceParam from .api_spaces_data_source import APISpacesDataSource as APISpacesDataSource from .data_source_list_params import DataSourceListParams as DataSourceListParams from .data_source_create_params import DataSourceCreateParams as DataSourceCreateParams diff --git a/src/digitalocean_genai_sdk/types/knowledge_bases/api_file_upload_data_source.py b/src/gradientai/types/knowledge_bases/api_file_upload_data_source.py similarity index 100% rename from src/digitalocean_genai_sdk/types/knowledge_bases/api_file_upload_data_source.py rename to src/gradientai/types/knowledge_bases/api_file_upload_data_source.py diff --git a/src/digitalocean_genai_sdk/types/knowledge_bases/api_file_upload_data_source_param.py b/src/gradientai/types/knowledge_bases/api_file_upload_data_source_param.py similarity index 100% rename from src/digitalocean_genai_sdk/types/knowledge_bases/api_file_upload_data_source_param.py rename to src/gradientai/types/knowledge_bases/api_file_upload_data_source_param.py diff --git a/src/digitalocean_genai_sdk/types/knowledge_bases/api_knowledge_base_data_source.py b/src/gradientai/types/knowledge_bases/api_knowledge_base_data_source.py similarity index 79% rename from src/digitalocean_genai_sdk/types/knowledge_bases/api_knowledge_base_data_source.py rename to src/gradientai/types/knowledge_bases/api_knowledge_base_data_source.py index df1cd3bb..57080aaa 100644 --- a/src/digitalocean_genai_sdk/types/knowledge_bases/api_knowledge_base_data_source.py +++ b/src/gradientai/types/knowledge_bases/api_knowledge_base_data_source.py @@ -9,10 +9,20 @@ from .api_file_upload_data_source import APIFileUploadDataSource from .api_web_crawler_data_source import APIWebCrawlerDataSource -__all__ = ["APIKnowledgeBaseDataSource"] +__all__ = ["APIKnowledgeBaseDataSource", "AwsDataSource"] + + +class AwsDataSource(BaseModel): + bucket_name: Optional[str] = None + + item_path: Optional[str] = None + + region: Optional[str] = None class APIKnowledgeBaseDataSource(BaseModel): + aws_data_source: Optional[AwsDataSource] = None + bucket_name: Optional[str] = None created_at: Optional[datetime] = None diff --git a/src/digitalocean_genai_sdk/types/knowledge_bases/api_spaces_data_source.py b/src/gradientai/types/knowledge_bases/api_spaces_data_source.py similarity index 100% rename from src/digitalocean_genai_sdk/types/knowledge_bases/api_spaces_data_source.py rename to src/gradientai/types/knowledge_bases/api_spaces_data_source.py diff --git a/src/digitalocean_genai_sdk/types/knowledge_bases/api_spaces_data_source_param.py b/src/gradientai/types/knowledge_bases/api_spaces_data_source_param.py similarity index 100% rename from src/digitalocean_genai_sdk/types/knowledge_bases/api_spaces_data_source_param.py rename to src/gradientai/types/knowledge_bases/api_spaces_data_source_param.py diff --git 
a/src/digitalocean_genai_sdk/types/knowledge_bases/api_web_crawler_data_source.py b/src/gradientai/types/knowledge_bases/api_web_crawler_data_source.py similarity index 100% rename from src/digitalocean_genai_sdk/types/knowledge_bases/api_web_crawler_data_source.py rename to src/gradientai/types/knowledge_bases/api_web_crawler_data_source.py diff --git a/src/digitalocean_genai_sdk/types/knowledge_bases/api_web_crawler_data_source_param.py b/src/gradientai/types/knowledge_bases/api_web_crawler_data_source_param.py similarity index 100% rename from src/digitalocean_genai_sdk/types/knowledge_bases/api_web_crawler_data_source_param.py rename to src/gradientai/types/knowledge_bases/api_web_crawler_data_source_param.py diff --git a/src/gradientai/types/knowledge_bases/aws_data_source_param.py b/src/gradientai/types/knowledge_bases/aws_data_source_param.py new file mode 100644 index 00000000..93d49228 --- /dev/null +++ b/src/gradientai/types/knowledge_bases/aws_data_source_param.py @@ -0,0 +1,19 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["AwsDataSourceParam"] + + +class AwsDataSourceParam(TypedDict, total=False): + bucket_name: str + + item_path: str + + key_id: str + + region: str + + secret_key: str diff --git a/src/digitalocean_genai_sdk/types/knowledge_bases/data_source_create_params.py b/src/gradientai/types/knowledge_bases/data_source_create_params.py similarity index 72% rename from src/digitalocean_genai_sdk/types/knowledge_bases/data_source_create_params.py rename to src/gradientai/types/knowledge_bases/data_source_create_params.py index b1abafdf..22bd76e7 100644 --- a/src/digitalocean_genai_sdk/types/knowledge_bases/data_source_create_params.py +++ b/src/gradientai/types/knowledge_bases/data_source_create_params.py @@ -5,29 +5,18 @@ from typing_extensions import Annotated, TypedDict from ..._utils import PropertyInfo +from .aws_data_source_param import AwsDataSourceParam from .api_spaces_data_source_param import APISpacesDataSourceParam from .api_web_crawler_data_source_param import APIWebCrawlerDataSourceParam -__all__ = ["DataSourceCreateParams", "AwsDataSource"] +__all__ = ["DataSourceCreateParams"] class DataSourceCreateParams(TypedDict, total=False): - aws_data_source: AwsDataSource + aws_data_source: AwsDataSourceParam body_knowledge_base_uuid: Annotated[str, PropertyInfo(alias="knowledge_base_uuid")] spaces_data_source: APISpacesDataSourceParam web_crawler_data_source: APIWebCrawlerDataSourceParam - - -class AwsDataSource(TypedDict, total=False): - bucket_name: str - - item_path: str - - key_id: str - - region: str - - secret_key: str diff --git a/src/digitalocean_genai_sdk/types/knowledge_bases/data_source_create_response.py b/src/gradientai/types/knowledge_bases/data_source_create_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/knowledge_bases/data_source_create_response.py rename to src/gradientai/types/knowledge_bases/data_source_create_response.py diff --git a/src/digitalocean_genai_sdk/types/knowledge_bases/data_source_delete_response.py b/src/gradientai/types/knowledge_bases/data_source_delete_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/knowledge_bases/data_source_delete_response.py rename to src/gradientai/types/knowledge_bases/data_source_delete_response.py diff --git a/src/digitalocean_genai_sdk/types/knowledge_bases/data_source_list_params.py 
b/src/gradientai/types/knowledge_bases/data_source_list_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/knowledge_bases/data_source_list_params.py rename to src/gradientai/types/knowledge_bases/data_source_list_params.py diff --git a/src/digitalocean_genai_sdk/types/knowledge_bases/data_source_list_response.py b/src/gradientai/types/knowledge_bases/data_source_list_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/knowledge_bases/data_source_list_response.py rename to src/gradientai/types/knowledge_bases/data_source_list_response.py diff --git a/src/digitalocean_genai_sdk/types/api_key_list_params.py b/src/gradientai/types/model_list_params.py similarity index 94% rename from src/digitalocean_genai_sdk/types/api_key_list_params.py rename to src/gradientai/types/model_list_params.py index a1ab60dc..4abc1dc1 100644 --- a/src/digitalocean_genai_sdk/types/api_key_list_params.py +++ b/src/gradientai/types/model_list_params.py @@ -5,10 +5,10 @@ from typing import List from typing_extensions import Literal, TypedDict -__all__ = ["APIKeyListParams"] +__all__ = ["ModelListParams"] -class APIKeyListParams(TypedDict, total=False): +class ModelListParams(TypedDict, total=False): page: int """page number.""" diff --git a/src/gradientai/types/model_list_response.py b/src/gradientai/types/model_list_response.py new file mode 100644 index 00000000..e6f5fad5 --- /dev/null +++ b/src/gradientai/types/model_list_response.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional + +from .._models import BaseModel +from .api_model import APIModel +from .agents.api_meta import APIMeta +from .agents.api_links import APILinks + +__all__ = ["ModelListResponse"] + + +class ModelListResponse(BaseModel): + links: Optional[APILinks] = None + + meta: Optional[APIMeta] = None + + models: Optional[List[APIModel]] = None diff --git a/src/digitalocean_genai_sdk/types/auth/__init__.py b/src/gradientai/types/providers/__init__.py similarity index 100% rename from src/digitalocean_genai_sdk/types/auth/__init__.py rename to src/gradientai/types/providers/__init__.py diff --git a/src/digitalocean_genai_sdk/types/providers/anthropic/__init__.py b/src/gradientai/types/providers/anthropic/__init__.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/anthropic/__init__.py rename to src/gradientai/types/providers/anthropic/__init__.py diff --git a/src/digitalocean_genai_sdk/types/providers/anthropic/key_create_params.py b/src/gradientai/types/providers/anthropic/key_create_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/anthropic/key_create_params.py rename to src/gradientai/types/providers/anthropic/key_create_params.py diff --git a/src/digitalocean_genai_sdk/types/providers/anthropic/key_create_response.py b/src/gradientai/types/providers/anthropic/key_create_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/anthropic/key_create_response.py rename to src/gradientai/types/providers/anthropic/key_create_response.py diff --git a/src/digitalocean_genai_sdk/types/providers/anthropic/key_delete_response.py b/src/gradientai/types/providers/anthropic/key_delete_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/anthropic/key_delete_response.py rename to src/gradientai/types/providers/anthropic/key_delete_response.py diff --git 
a/src/digitalocean_genai_sdk/types/providers/anthropic/key_list_agents_params.py b/src/gradientai/types/providers/anthropic/key_list_agents_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/anthropic/key_list_agents_params.py rename to src/gradientai/types/providers/anthropic/key_list_agents_params.py diff --git a/src/digitalocean_genai_sdk/types/providers/anthropic/key_list_agents_response.py b/src/gradientai/types/providers/anthropic/key_list_agents_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/anthropic/key_list_agents_response.py rename to src/gradientai/types/providers/anthropic/key_list_agents_response.py diff --git a/src/digitalocean_genai_sdk/types/providers/anthropic/key_list_params.py b/src/gradientai/types/providers/anthropic/key_list_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/anthropic/key_list_params.py rename to src/gradientai/types/providers/anthropic/key_list_params.py diff --git a/src/digitalocean_genai_sdk/types/providers/anthropic/key_list_response.py b/src/gradientai/types/providers/anthropic/key_list_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/anthropic/key_list_response.py rename to src/gradientai/types/providers/anthropic/key_list_response.py diff --git a/src/digitalocean_genai_sdk/types/providers/anthropic/key_retrieve_response.py b/src/gradientai/types/providers/anthropic/key_retrieve_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/anthropic/key_retrieve_response.py rename to src/gradientai/types/providers/anthropic/key_retrieve_response.py diff --git a/src/digitalocean_genai_sdk/types/providers/anthropic/key_update_params.py b/src/gradientai/types/providers/anthropic/key_update_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/anthropic/key_update_params.py rename to src/gradientai/types/providers/anthropic/key_update_params.py diff --git a/src/digitalocean_genai_sdk/types/providers/anthropic/key_update_response.py b/src/gradientai/types/providers/anthropic/key_update_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/anthropic/key_update_response.py rename to src/gradientai/types/providers/anthropic/key_update_response.py diff --git a/src/digitalocean_genai_sdk/types/providers/openai/__init__.py b/src/gradientai/types/providers/openai/__init__.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/openai/__init__.py rename to src/gradientai/types/providers/openai/__init__.py diff --git a/src/digitalocean_genai_sdk/types/providers/openai/key_create_params.py b/src/gradientai/types/providers/openai/key_create_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/openai/key_create_params.py rename to src/gradientai/types/providers/openai/key_create_params.py diff --git a/src/digitalocean_genai_sdk/types/providers/openai/key_create_response.py b/src/gradientai/types/providers/openai/key_create_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/openai/key_create_response.py rename to src/gradientai/types/providers/openai/key_create_response.py diff --git a/src/digitalocean_genai_sdk/types/providers/openai/key_delete_response.py b/src/gradientai/types/providers/openai/key_delete_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/openai/key_delete_response.py 
rename to src/gradientai/types/providers/openai/key_delete_response.py diff --git a/src/digitalocean_genai_sdk/types/providers/openai/key_list_params.py b/src/gradientai/types/providers/openai/key_list_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/openai/key_list_params.py rename to src/gradientai/types/providers/openai/key_list_params.py diff --git a/src/digitalocean_genai_sdk/types/providers/openai/key_list_response.py b/src/gradientai/types/providers/openai/key_list_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/openai/key_list_response.py rename to src/gradientai/types/providers/openai/key_list_response.py diff --git a/src/digitalocean_genai_sdk/types/providers/openai/key_retrieve_agents_params.py b/src/gradientai/types/providers/openai/key_retrieve_agents_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/openai/key_retrieve_agents_params.py rename to src/gradientai/types/providers/openai/key_retrieve_agents_params.py diff --git a/src/digitalocean_genai_sdk/types/providers/openai/key_retrieve_agents_response.py b/src/gradientai/types/providers/openai/key_retrieve_agents_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/openai/key_retrieve_agents_response.py rename to src/gradientai/types/providers/openai/key_retrieve_agents_response.py diff --git a/src/digitalocean_genai_sdk/types/providers/openai/key_retrieve_response.py b/src/gradientai/types/providers/openai/key_retrieve_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/openai/key_retrieve_response.py rename to src/gradientai/types/providers/openai/key_retrieve_response.py diff --git a/src/digitalocean_genai_sdk/types/providers/openai/key_update_params.py b/src/gradientai/types/providers/openai/key_update_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/openai/key_update_params.py rename to src/gradientai/types/providers/openai/key_update_params.py diff --git a/src/digitalocean_genai_sdk/types/providers/openai/key_update_response.py b/src/gradientai/types/providers/openai/key_update_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/providers/openai/key_update_response.py rename to src/gradientai/types/providers/openai/key_update_response.py diff --git a/src/gradientai/types/region_list_evaluation_metrics_response.py b/src/gradientai/types/region_list_evaluation_metrics_response.py new file mode 100644 index 00000000..c57b71d1 --- /dev/null +++ b/src/gradientai/types/region_list_evaluation_metrics_response.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
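# ---------------------------------------------------------------------------
# Not part of the generated diff: a hedged usage sketch for the top-level
# model-list types renamed earlier in this diff. Only fields visible in those
# hunks are used; the page number and empty model list are placeholders.
from gradientai.types.model_list_params import ModelListParams
from gradientai.types.model_list_response import ModelListResponse

params = ModelListParams(page=1)      # TypedDict -> a plain dict at runtime
resp = ModelListResponse(models=[])   # every field on the response is optional
print(params["page"], len(resp.models or []))
# ---------------------------------------------------------------------------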
+ +from typing import List, Optional + +from .._models import BaseModel +from .api_evaluation_metric import APIEvaluationMetric + +__all__ = ["RegionListEvaluationMetricsResponse"] + + +class RegionListEvaluationMetricsResponse(BaseModel): + metrics: Optional[List[APIEvaluationMetric]] = None diff --git a/src/digitalocean_genai_sdk/types/region_list_params.py b/src/gradientai/types/region_list_params.py similarity index 100% rename from src/digitalocean_genai_sdk/types/region_list_params.py rename to src/gradientai/types/region_list_params.py diff --git a/src/digitalocean_genai_sdk/types/region_list_response.py b/src/gradientai/types/region_list_response.py similarity index 100% rename from src/digitalocean_genai_sdk/types/region_list_response.py rename to src/gradientai/types/region_list_response.py diff --git a/src/gradientai/types/regions/__init__.py b/src/gradientai/types/regions/__init__.py new file mode 100644 index 00000000..695ba3b4 --- /dev/null +++ b/src/gradientai/types/regions/__init__.py @@ -0,0 +1,32 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .api_star_metric import APIStarMetric as APIStarMetric +from .api_star_metric_param import APIStarMetricParam as APIStarMetricParam +from .api_evaluation_test_case import APIEvaluationTestCase as APIEvaluationTestCase +from .evaluation_run_create_params import EvaluationRunCreateParams as EvaluationRunCreateParams +from .evaluation_run_create_response import EvaluationRunCreateResponse as EvaluationRunCreateResponse +from .evaluation_dataset_create_params import EvaluationDatasetCreateParams as EvaluationDatasetCreateParams +from .evaluation_run_retrieve_response import EvaluationRunRetrieveResponse as EvaluationRunRetrieveResponse +from .evaluation_dataset_create_response import EvaluationDatasetCreateResponse as EvaluationDatasetCreateResponse +from .evaluation_test_case_create_params import EvaluationTestCaseCreateParams as EvaluationTestCaseCreateParams +from .evaluation_test_case_list_response import EvaluationTestCaseListResponse as EvaluationTestCaseListResponse +from .evaluation_test_case_update_params import EvaluationTestCaseUpdateParams as EvaluationTestCaseUpdateParams +from .evaluation_test_case_create_response import EvaluationTestCaseCreateResponse as EvaluationTestCaseCreateResponse +from .evaluation_test_case_update_response import EvaluationTestCaseUpdateResponse as EvaluationTestCaseUpdateResponse +from .evaluation_test_case_retrieve_response import ( + EvaluationTestCaseRetrieveResponse as EvaluationTestCaseRetrieveResponse, +) +from .evaluation_test_case_list_evaluation_runs_params import ( + EvaluationTestCaseListEvaluationRunsParams as EvaluationTestCaseListEvaluationRunsParams, +) +from .evaluation_test_case_list_evaluation_runs_response import ( + EvaluationTestCaseListEvaluationRunsResponse as EvaluationTestCaseListEvaluationRunsResponse, +) +from .evaluation_dataset_create_file_upload_presigned_urls_params import ( + EvaluationDatasetCreateFileUploadPresignedURLsParams as EvaluationDatasetCreateFileUploadPresignedURLsParams, +) +from .evaluation_dataset_create_file_upload_presigned_urls_response import ( + EvaluationDatasetCreateFileUploadPresignedURLsResponse as EvaluationDatasetCreateFileUploadPresignedURLsResponse, +) diff --git a/src/gradientai/types/regions/api_evaluation_test_case.py b/src/gradientai/types/regions/api_evaluation_test_case.py new file mode 100644 index 00000000..d799b0e0 --- /dev/null +++ 
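# ---------------------------------------------------------------------------
# Not part of the generated diff: a hedged sketch for the new region
# evaluation-metric listing types. The response wrapper only carries an
# optional list of APIEvaluationMetric objects; how the client populates it
# (presumably via a regions "list evaluation metrics" call) is assumed here.
from gradientai.types.region_list_evaluation_metrics_response import (
    RegionListEvaluationMetricsResponse,
)

resp = RegionListEvaluationMetricsResponse(metrics=None)  # placeholder instance
for metric in resp.metrics or []:
    print(metric)
# ---------------------------------------------------------------------------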
b/src/gradientai/types/regions/api_evaluation_test_case.py @@ -0,0 +1,46 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional +from datetime import datetime + +from ..._models import BaseModel +from .api_star_metric import APIStarMetric +from ..api_evaluation_metric import APIEvaluationMetric + +__all__ = ["APIEvaluationTestCase"] + + +class APIEvaluationTestCase(BaseModel): + archived_at: Optional[datetime] = None + + created_at: Optional[datetime] = None + + created_by_user_email: Optional[str] = None + + created_by_user_id: Optional[str] = None + + dataset_name: Optional[str] = None + + dataset_uuid: Optional[str] = None + + description: Optional[str] = None + + latest_version_number_of_runs: Optional[int] = None + + metrics: Optional[List[APIEvaluationMetric]] = None + + name: Optional[str] = None + + star_metric: Optional[APIStarMetric] = None + + test_case_uuid: Optional[str] = None + + total_runs: Optional[int] = None + + updated_at: Optional[datetime] = None + + updated_by_user_email: Optional[str] = None + + updated_by_user_id: Optional[str] = None + + version: Optional[int] = None diff --git a/src/gradientai/types/regions/api_star_metric.py b/src/gradientai/types/regions/api_star_metric.py new file mode 100644 index 00000000..c9ecc60a --- /dev/null +++ b/src/gradientai/types/regions/api_star_metric.py @@ -0,0 +1,19 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from ..._models import BaseModel + +__all__ = ["APIStarMetric"] + + +class APIStarMetric(BaseModel): + metric_uuid: Optional[str] = None + + name: Optional[str] = None + + success_threshold_pct: Optional[int] = None + """ + The success threshold for the star metric. This is a percentage value between 0 + and 100. + """ diff --git a/src/gradientai/types/regions/api_star_metric_param.py b/src/gradientai/types/regions/api_star_metric_param.py new file mode 100644 index 00000000..5f7b2fd9 --- /dev/null +++ b/src/gradientai/types/regions/api_star_metric_param.py @@ -0,0 +1,19 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["APIStarMetricParam"] + + +class APIStarMetricParam(TypedDict, total=False): + metric_uuid: str + + name: str + + success_threshold_pct: int + """ + The success threshold for the star metric. This is a percentage value between 0 + and 100. + """ diff --git a/src/gradientai/types/regions/evaluation_dataset_create_file_upload_presigned_urls_params.py b/src/gradientai/types/regions/evaluation_dataset_create_file_upload_presigned_urls_params.py new file mode 100644 index 00000000..6aa6d27a --- /dev/null +++ b/src/gradientai/types/regions/evaluation_dataset_create_file_upload_presigned_urls_params.py @@ -0,0 +1,20 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
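# ---------------------------------------------------------------------------
# Not part of the generated diff: a hedged sketch for the star-metric types
# added above. APIStarMetricParam is the request-side TypedDict mirror of
# APIStarMetric; success_threshold_pct is a percentage between 0 and 100, and
# the UUID below is a placeholder.
from gradientai.types.regions import APIStarMetricParam

star_metric = APIStarMetricParam(
    metric_uuid="00000000-0000-0000-0000-000000000000",
    name="answer_correctness",
    success_threshold_pct=80,
)
# ---------------------------------------------------------------------------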
+ +from __future__ import annotations + +from typing import Iterable +from typing_extensions import TypedDict + +__all__ = ["EvaluationDatasetCreateFileUploadPresignedURLsParams", "File"] + + +class EvaluationDatasetCreateFileUploadPresignedURLsParams(TypedDict, total=False): + files: Iterable[File] + """A list of files to generate presigned URLs for.""" + + +class File(TypedDict, total=False): + file_name: str + + file_size: str + """The size of the file in bytes.""" diff --git a/src/gradientai/types/regions/evaluation_dataset_create_file_upload_presigned_urls_response.py b/src/gradientai/types/regions/evaluation_dataset_create_file_upload_presigned_urls_response.py new file mode 100644 index 00000000..bee94c93 --- /dev/null +++ b/src/gradientai/types/regions/evaluation_dataset_create_file_upload_presigned_urls_response.py @@ -0,0 +1,30 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional +from datetime import datetime + +from ..._models import BaseModel + +__all__ = ["EvaluationDatasetCreateFileUploadPresignedURLsResponse", "Upload"] + + +class Upload(BaseModel): + expires_at: Optional[datetime] = None + """The time the url expires at.""" + + object_key: Optional[str] = None + """The unique object key to store the file as.""" + + original_file_name: Optional[str] = None + """The original file name.""" + + presigned_url: Optional[str] = None + """The actual presigned URL the client can use to upload the file directly.""" + + +class EvaluationDatasetCreateFileUploadPresignedURLsResponse(BaseModel): + request_id: Optional[str] = None + """The ID generated for the request for Presigned URLs.""" + + uploads: Optional[List[Upload]] = None + """A list of generated presigned URLs and object keys, one per file.""" diff --git a/src/gradientai/types/regions/evaluation_dataset_create_params.py b/src/gradientai/types/regions/evaluation_dataset_create_params.py new file mode 100644 index 00000000..c8a84c23 --- /dev/null +++ b/src/gradientai/types/regions/evaluation_dataset_create_params.py @@ -0,0 +1,17 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +from ..knowledge_bases.api_file_upload_data_source_param import APIFileUploadDataSourceParam + +__all__ = ["EvaluationDatasetCreateParams"] + + +class EvaluationDatasetCreateParams(TypedDict, total=False): + file_upload_dataset: APIFileUploadDataSourceParam + """File to upload as data source for knowledge base.""" + + name: str + """The name of the agent evaluation dataset.""" diff --git a/src/gradientai/types/regions/evaluation_dataset_create_response.py b/src/gradientai/types/regions/evaluation_dataset_create_response.py new file mode 100644 index 00000000..f5c7fbac --- /dev/null +++ b/src/gradientai/types/regions/evaluation_dataset_create_response.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
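# ---------------------------------------------------------------------------
# Not part of the generated diff: a hedged sketch for the presigned-URL upload
# types above. Note that file_size is a string (bytes, per the docstring); the
# file name and size are placeholders. A response would expose one presigned
# URL per requested file via uploads[i].presigned_url and object_key.
from gradientai.types.regions import EvaluationDatasetCreateFileUploadPresignedURLsParams

presign_params = EvaluationDatasetCreateFileUploadPresignedURLsParams(
    files=[
        {"file_name": "eval_dataset.csv", "file_size": "10485760"},
    ]
)
# ---------------------------------------------------------------------------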
+ +from typing import Optional + +from ..._models import BaseModel + +__all__ = ["EvaluationDatasetCreateResponse"] + + +class EvaluationDatasetCreateResponse(BaseModel): + evaluation_dataset_uuid: Optional[str] = None + """Evaluation dataset uuid.""" diff --git a/src/gradientai/types/regions/evaluation_run_create_params.py b/src/gradientai/types/regions/evaluation_run_create_params.py new file mode 100644 index 00000000..1ae2dbbb --- /dev/null +++ b/src/gradientai/types/regions/evaluation_run_create_params.py @@ -0,0 +1,17 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["EvaluationRunCreateParams"] + + +class EvaluationRunCreateParams(TypedDict, total=False): + agent_uuid: str + """Agent UUID to run the test case against.""" + + run_name: str + """The name of the run.""" + + test_case_uuid: str diff --git a/src/gradientai/types/regions/evaluation_run_create_response.py b/src/gradientai/types/regions/evaluation_run_create_response.py new file mode 100644 index 00000000..36942c29 --- /dev/null +++ b/src/gradientai/types/regions/evaluation_run_create_response.py @@ -0,0 +1,11 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from ..._models import BaseModel + +__all__ = ["EvaluationRunCreateResponse"] + + +class EvaluationRunCreateResponse(BaseModel): + evaluation_run_uuid: Optional[str] = None diff --git a/src/gradientai/types/regions/evaluation_run_retrieve_response.py b/src/gradientai/types/regions/evaluation_run_retrieve_response.py new file mode 100644 index 00000000..68d71978 --- /dev/null +++ b/src/gradientai/types/regions/evaluation_run_retrieve_response.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from ..._models import BaseModel +from .evaluation_runs.api_evaluation_run import APIEvaluationRun + +__all__ = ["EvaluationRunRetrieveResponse"] + + +class EvaluationRunRetrieveResponse(BaseModel): + evaluation_run: Optional[APIEvaluationRun] = None diff --git a/src/gradientai/types/regions/evaluation_runs/__init__.py b/src/gradientai/types/regions/evaluation_runs/__init__.py new file mode 100644 index 00000000..0ec4f2f6 --- /dev/null +++ b/src/gradientai/types/regions/evaluation_runs/__init__.py @@ -0,0 +1,9 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .api_prompt import APIPrompt as APIPrompt +from .api_evaluation_run import APIEvaluationRun as APIEvaluationRun +from .result_retrieve_response import ResultRetrieveResponse as ResultRetrieveResponse +from .api_evaluation_metric_result import APIEvaluationMetricResult as APIEvaluationMetricResult +from .result_retrieve_prompt_response import ResultRetrievePromptResponse as ResultRetrievePromptResponse diff --git a/src/gradientai/types/regions/evaluation_runs/api_evaluation_metric_result.py b/src/gradientai/types/regions/evaluation_runs/api_evaluation_metric_result.py new file mode 100644 index 00000000..cb50fd80 --- /dev/null +++ b/src/gradientai/types/regions/evaluation_runs/api_evaluation_metric_result.py @@ -0,0 +1,17 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
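# ---------------------------------------------------------------------------
# Not part of the generated diff: a hedged sketch for the evaluation-run
# creation params above. All three fields are plain strings; the UUIDs and run
# name are placeholders, and the client method that would accept these params
# is not shown in this diff.
from gradientai.types.regions import EvaluationRunCreateParams

run_params = EvaluationRunCreateParams(
    agent_uuid="agent-uuid-placeholder",
    run_name="nightly-regression",
    test_case_uuid="test-case-uuid-placeholder",
)
# ---------------------------------------------------------------------------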
+ +from typing import Optional + +from ...._models import BaseModel + +__all__ = ["APIEvaluationMetricResult"] + + +class APIEvaluationMetricResult(BaseModel): + metric_name: Optional[str] = None + + number_value: Optional[float] = None + """The value of the metric as a number.""" + + string_value: Optional[str] = None + """The value of the metric as a string.""" diff --git a/src/gradientai/types/regions/evaluation_runs/api_evaluation_run.py b/src/gradientai/types/regions/evaluation_runs/api_evaluation_run.py new file mode 100644 index 00000000..7822f53c --- /dev/null +++ b/src/gradientai/types/regions/evaluation_runs/api_evaluation_run.py @@ -0,0 +1,56 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional +from datetime import datetime +from typing_extensions import Literal + +from ...._models import BaseModel +from .api_evaluation_metric_result import APIEvaluationMetricResult + +__all__ = ["APIEvaluationRun"] + + +class APIEvaluationRun(BaseModel): + agent_uuid: Optional[str] = None + """Agent UUID.""" + + agent_version_hash: Optional[str] = None + + evaluation_run_uuid: Optional[str] = None + """Evaluation run UUID.""" + + finished_at: Optional[datetime] = None + """Run end time.""" + + pass_status: Optional[bool] = None + """The pass status of the evaluation run based on the star metric.""" + + run_level_metric_results: Optional[List[APIEvaluationMetricResult]] = None + + run_name: Optional[str] = None + """Run name.""" + + star_metric_result: Optional[APIEvaluationMetricResult] = None + + started_at: Optional[datetime] = None + """Run start time.""" + + status: Optional[ + Literal[ + "EVALUATION_RUN_STATUS_UNSPECIFIED", + "EVALUATION_RUN_QUEUED", + "EVALUATION_RUN_RUNNING_DATASET", + "EVALUATION_RUN_EVALUATING_RESULTS", + "EVALUATION_RUN_CANCELLING", + "EVALUATION_RUN_CANCELLED", + "EVALUATION_RUN_SUCCESSFUL", + "EVALUATION_RUN_PARTIALLY_SUCCESSFUL", + "EVALUATION_RUN_FAILED", + ] + ] = None + + test_case_uuid: Optional[str] = None + """Test-case UUID.""" + + test_case_version: Optional[int] = None + """Test-case-version.""" diff --git a/src/gradientai/types/regions/evaluation_runs/api_prompt.py b/src/gradientai/types/regions/evaluation_runs/api_prompt.py new file mode 100644 index 00000000..fb5a51f4 --- /dev/null +++ b/src/gradientai/types/regions/evaluation_runs/api_prompt.py @@ -0,0 +1,42 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
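# ---------------------------------------------------------------------------
# Not part of the generated diff: a hedged sketch showing how a retrieved
# APIEvaluationRun might be inspected. The instance here is constructed by
# hand purely for illustration; status values come from the Literal enum above.
from gradientai.types.regions.evaluation_runs import APIEvaluationRun

run = APIEvaluationRun(status="EVALUATION_RUN_SUCCESSFUL", pass_status=True)
if run.status == "EVALUATION_RUN_SUCCESSFUL" and run.pass_status:
    for result in run.run_level_metric_results or []:
        print(result.metric_name, result.number_value or result.string_value)
# ---------------------------------------------------------------------------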
+ +from typing import List, Optional + +from ...._models import BaseModel +from .api_evaluation_metric_result import APIEvaluationMetricResult + +__all__ = ["APIPrompt", "PromptChunk"] + + +class PromptChunk(BaseModel): + chunk_usage_pct: Optional[float] = None + """The usage percentage of the chunk.""" + + chunk_used: Optional[bool] = None + """Indicates if the chunk was used in the prompt.""" + + index_uuid: Optional[str] = None + """The index uuid (Knowledge Base) of the chunk.""" + + source_name: Optional[str] = None + """The source name for the chunk, e.g., the file name or document title.""" + + text: Optional[str] = None + """Text content of the chunk.""" + + +class APIPrompt(BaseModel): + ground_truth: Optional[str] = None + """The ground truth for the prompt.""" + + input: Optional[str] = None + + output: Optional[str] = None + + prompt_chunks: Optional[List[PromptChunk]] = None + """The list of prompt chunks.""" + + prompt_id: Optional[int] = None + + prompt_level_metric_results: Optional[List[APIEvaluationMetricResult]] = None + """The metric results for the prompt.""" diff --git a/src/gradientai/types/regions/evaluation_runs/result_retrieve_prompt_response.py b/src/gradientai/types/regions/evaluation_runs/result_retrieve_prompt_response.py new file mode 100644 index 00000000..ebebee48 --- /dev/null +++ b/src/gradientai/types/regions/evaluation_runs/result_retrieve_prompt_response.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from ...._models import BaseModel +from .api_prompt import APIPrompt + +__all__ = ["ResultRetrievePromptResponse"] + + +class ResultRetrievePromptResponse(BaseModel): + prompt: Optional[APIPrompt] = None diff --git a/src/gradientai/types/regions/evaluation_runs/result_retrieve_response.py b/src/gradientai/types/regions/evaluation_runs/result_retrieve_response.py new file mode 100644 index 00000000..27256353 --- /dev/null +++ b/src/gradientai/types/regions/evaluation_runs/result_retrieve_response.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional + +from ...._models import BaseModel +from .api_prompt import APIPrompt +from .api_evaluation_run import APIEvaluationRun + +__all__ = ["ResultRetrieveResponse"] + + +class ResultRetrieveResponse(BaseModel): + evaluation_run: Optional[APIEvaluationRun] = None + + prompts: Optional[List[APIPrompt]] = None + """The prompt level results.""" diff --git a/src/gradientai/types/regions/evaluation_test_case_create_params.py b/src/gradientai/types/regions/evaluation_test_case_create_params.py new file mode 100644 index 00000000..51ce20c7 --- /dev/null +++ b/src/gradientai/types/regions/evaluation_test_case_create_params.py @@ -0,0 +1,29 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
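# ---------------------------------------------------------------------------
# Not part of the generated diff: a hedged sketch for walking prompt-level
# results. Each APIPrompt pairs the input/output with the knowledge-base
# chunks retrieved for it; how the ResultRetrieveResponse is obtained from the
# client is assumed, not shown here.
from gradientai.types.regions.evaluation_runs import ResultRetrieveResponse


def summarize_results(results: ResultRetrieveResponse) -> None:
    for prompt in results.prompts or []:
        used_chunks = [c for c in (prompt.prompt_chunks or []) if c.chunk_used]
        print(prompt.prompt_id, f"{len(used_chunks)} chunk(s) used", prompt.output)
# ---------------------------------------------------------------------------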
+ +from __future__ import annotations + +from typing import List +from typing_extensions import TypedDict + +from .api_star_metric_param import APIStarMetricParam + +__all__ = ["EvaluationTestCaseCreateParams"] + + +class EvaluationTestCaseCreateParams(TypedDict, total=False): + dataset_uuid: str + """Dataset against which the test‑case is executed.""" + + description: str + """Description of the test case.""" + + metrics: List[str] + """Full metric list to use for evaluation test case.""" + + name: str + """Name of the test case.""" + + star_metric: APIStarMetricParam + + workspace_uuid: str + """The workspace uuid.""" diff --git a/src/gradientai/types/regions/evaluation_test_case_create_response.py b/src/gradientai/types/regions/evaluation_test_case_create_response.py new file mode 100644 index 00000000..9f8e37f4 --- /dev/null +++ b/src/gradientai/types/regions/evaluation_test_case_create_response.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from ..._models import BaseModel + +__all__ = ["EvaluationTestCaseCreateResponse"] + + +class EvaluationTestCaseCreateResponse(BaseModel): + test_case_uuid: Optional[str] = None + """Test‑case UUID.""" diff --git a/src/gradientai/types/regions/evaluation_test_case_list_evaluation_runs_params.py b/src/gradientai/types/regions/evaluation_test_case_list_evaluation_runs_params.py new file mode 100644 index 00000000..7f30ee28 --- /dev/null +++ b/src/gradientai/types/regions/evaluation_test_case_list_evaluation_runs_params.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["EvaluationTestCaseListEvaluationRunsParams"] + + +class EvaluationTestCaseListEvaluationRunsParams(TypedDict, total=False): + evaluation_test_case_version: int + """Version of the test case.""" diff --git a/src/gradientai/types/regions/evaluation_test_case_list_evaluation_runs_response.py b/src/gradientai/types/regions/evaluation_test_case_list_evaluation_runs_response.py new file mode 100644 index 00000000..4233d0ec --- /dev/null +++ b/src/gradientai/types/regions/evaluation_test_case_list_evaluation_runs_response.py @@ -0,0 +1,13 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional + +from ..._models import BaseModel +from .evaluation_runs.api_evaluation_run import APIEvaluationRun + +__all__ = ["EvaluationTestCaseListEvaluationRunsResponse"] + + +class EvaluationTestCaseListEvaluationRunsResponse(BaseModel): + evaluation_runs: Optional[List[APIEvaluationRun]] = None + """List of evaluation runs.""" diff --git a/src/gradientai/types/regions/evaluation_test_case_list_response.py b/src/gradientai/types/regions/evaluation_test_case_list_response.py new file mode 100644 index 00000000..ccfc263e --- /dev/null +++ b/src/gradientai/types/regions/evaluation_test_case_list_response.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
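# ---------------------------------------------------------------------------
# Not part of the generated diff: a hedged sketch for creating a test case
# with the params above. All values are placeholders; `metrics` takes plain
# strings (presumably metric UUIDs, matching the update params' metric_uuids
# field later in this diff).
from gradientai.types.regions import APIStarMetricParam, EvaluationTestCaseCreateParams

create_params = EvaluationTestCaseCreateParams(
    dataset_uuid="dataset-uuid-placeholder",
    name="support-bot-regression",
    description="Checks grounded answers against the support knowledge base.",
    metrics=["metric-uuid-1", "metric-uuid-2"],
    star_metric=APIStarMetricParam(metric_uuid="metric-uuid-1", success_threshold_pct=90),
    workspace_uuid="workspace-uuid-placeholder",
)
# ---------------------------------------------------------------------------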
+ +from typing import List, Optional + +from ..._models import BaseModel +from .api_evaluation_test_case import APIEvaluationTestCase + +__all__ = ["EvaluationTestCaseListResponse"] + + +class EvaluationTestCaseListResponse(BaseModel): + evaluation_test_cases: Optional[List[APIEvaluationTestCase]] = None diff --git a/src/gradientai/types/regions/evaluation_test_case_retrieve_response.py b/src/gradientai/types/regions/evaluation_test_case_retrieve_response.py new file mode 100644 index 00000000..1511ba74 --- /dev/null +++ b/src/gradientai/types/regions/evaluation_test_case_retrieve_response.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional + +from ..._models import BaseModel +from .api_evaluation_test_case import APIEvaluationTestCase + +__all__ = ["EvaluationTestCaseRetrieveResponse"] + + +class EvaluationTestCaseRetrieveResponse(BaseModel): + evaluation_test_case: Optional[APIEvaluationTestCase] = None diff --git a/src/gradientai/types/regions/evaluation_test_case_update_params.py b/src/gradientai/types/regions/evaluation_test_case_update_params.py new file mode 100644 index 00000000..be70fc95 --- /dev/null +++ b/src/gradientai/types/regions/evaluation_test_case_update_params.py @@ -0,0 +1,32 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import List +from typing_extensions import Annotated, TypedDict + +from ..._utils import PropertyInfo +from .api_star_metric_param import APIStarMetricParam + +__all__ = ["EvaluationTestCaseUpdateParams", "Metrics"] + + +class EvaluationTestCaseUpdateParams(TypedDict, total=False): + dataset_uuid: str + """Dataset against which the test‑case is executed.""" + + description: str + """Description of the test case.""" + + metrics: Metrics + + name: str + """Name of the test case.""" + + star_metric: APIStarMetricParam + + body_test_case_uuid: Annotated[str, PropertyInfo(alias="test_case_uuid")] + + +class Metrics(TypedDict, total=False): + metric_uuids: List[str] diff --git a/src/gradientai/types/regions/evaluation_test_case_update_response.py b/src/gradientai/types/regions/evaluation_test_case_update_response.py new file mode 100644 index 00000000..6f8e3b04 --- /dev/null +++ b/src/gradientai/types/regions/evaluation_test_case_update_response.py @@ -0,0 +1,14 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
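# ---------------------------------------------------------------------------
# Not part of the generated diff: a hedged sketch for the update params above.
# Unlike the create params, the metric list is wrapped in a nested Metrics
# object, and the body's test_case_uuid is sent through the aliased
# body_test_case_uuid key. Values are placeholders.
from gradientai.types.regions import EvaluationTestCaseUpdateParams

update_params = EvaluationTestCaseUpdateParams(
    body_test_case_uuid="test-case-uuid-placeholder",
    description="Adds the new faithfulness metric.",
    metrics={"metric_uuids": ["metric-uuid-1", "metric-uuid-3"]},
)
# ---------------------------------------------------------------------------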
+ +from typing import Optional + +from ..._models import BaseModel + +__all__ = ["EvaluationTestCaseUpdateResponse"] + + +class EvaluationTestCaseUpdateResponse(BaseModel): + test_case_uuid: Optional[str] = None + + version: Optional[int] = None + """The new verson of the test case.""" diff --git a/tests/api_resources/agents/test_api_keys.py b/tests/api_resources/agents/test_api_keys.py index 911ac6f9..beb9666a 100644 --- a/tests/api_resources/agents/test_api_keys.py +++ b/tests/api_resources/agents/test_api_keys.py @@ -7,9 +7,9 @@ import pytest +from gradientai import GradientAI, AsyncGradientAI from tests.utils import assert_matches_type -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk.types.agents import ( +from gradientai.types.agents import ( APIKeyListResponse, APIKeyCreateResponse, APIKeyDeleteResponse, @@ -25,7 +25,7 @@ class TestAPIKeys: @pytest.mark.skip() @parametrize - def test_method_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_create(self, client: GradientAI) -> None: api_key = client.agents.api_keys.create( path_agent_uuid="agent_uuid", ) @@ -33,7 +33,7 @@ def test_method_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_create_with_all_params(self, client: GradientAI) -> None: api_key = client.agents.api_keys.create( path_agent_uuid="agent_uuid", body_agent_uuid="agent_uuid", @@ -43,7 +43,7 @@ def test_method_create_with_all_params(self, client: DigitaloceanGenaiSDK) -> No @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_create(self, client: GradientAI) -> None: response = client.agents.api_keys.with_raw_response.create( path_agent_uuid="agent_uuid", ) @@ -55,7 +55,7 @@ def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_create(self, client: GradientAI) -> None: with client.agents.api_keys.with_streaming_response.create( path_agent_uuid="agent_uuid", ) as response: @@ -69,7 +69,7 @@ def test_streaming_response_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_create(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): client.agents.api_keys.with_raw_response.create( path_agent_uuid="", @@ -77,7 +77,7 @@ def test_path_params_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update(self, client: GradientAI) -> None: api_key = client.agents.api_keys.update( path_api_key_uuid="api_key_uuid", path_agent_uuid="agent_uuid", @@ -86,7 +86,7 @@ def test_method_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update_with_all_params(self, client: GradientAI) -> None: api_key = client.agents.api_keys.update( path_api_key_uuid="api_key_uuid", path_agent_uuid="agent_uuid", @@ -98,7 +98,7 @@ def test_method_update_with_all_params(self, client: 
DigitaloceanGenaiSDK) -> No @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_update(self, client: GradientAI) -> None: response = client.agents.api_keys.with_raw_response.update( path_api_key_uuid="api_key_uuid", path_agent_uuid="agent_uuid", @@ -111,7 +111,7 @@ def test_raw_response_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_update(self, client: GradientAI) -> None: with client.agents.api_keys.with_streaming_response.update( path_api_key_uuid="api_key_uuid", path_agent_uuid="agent_uuid", @@ -126,7 +126,7 @@ def test_streaming_response_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_update(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): client.agents.api_keys.with_raw_response.update( path_api_key_uuid="api_key_uuid", @@ -141,7 +141,7 @@ def test_path_params_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list(self, client: GradientAI) -> None: api_key = client.agents.api_keys.list( agent_uuid="agent_uuid", ) @@ -149,7 +149,7 @@ def test_method_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list_with_all_params(self, client: GradientAI) -> None: api_key = client.agents.api_keys.list( agent_uuid="agent_uuid", page=0, @@ -159,7 +159,7 @@ def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_list(self, client: GradientAI) -> None: response = client.agents.api_keys.with_raw_response.list( agent_uuid="agent_uuid", ) @@ -171,7 +171,7 @@ def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_list(self, client: GradientAI) -> None: with client.agents.api_keys.with_streaming_response.list( agent_uuid="agent_uuid", ) as response: @@ -185,7 +185,7 @@ def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_list(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): client.agents.api_keys.with_raw_response.list( agent_uuid="", @@ -193,7 +193,7 @@ def test_path_params_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_delete(self, client: GradientAI) -> None: api_key = client.agents.api_keys.delete( api_key_uuid="api_key_uuid", agent_uuid="agent_uuid", @@ -202,7 +202,7 @@ def test_method_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: 
DigitaloceanGenaiSDK) -> None: + def test_raw_response_delete(self, client: GradientAI) -> None: response = client.agents.api_keys.with_raw_response.delete( api_key_uuid="api_key_uuid", agent_uuid="agent_uuid", @@ -215,7 +215,7 @@ def test_raw_response_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_delete(self, client: GradientAI) -> None: with client.agents.api_keys.with_streaming_response.delete( api_key_uuid="api_key_uuid", agent_uuid="agent_uuid", @@ -230,7 +230,7 @@ def test_streaming_response_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_delete(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): client.agents.api_keys.with_raw_response.delete( api_key_uuid="api_key_uuid", @@ -245,7 +245,7 @@ def test_path_params_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_regenerate(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_regenerate(self, client: GradientAI) -> None: api_key = client.agents.api_keys.regenerate( api_key_uuid="api_key_uuid", agent_uuid="agent_uuid", @@ -254,7 +254,7 @@ def test_method_regenerate(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_regenerate(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_regenerate(self, client: GradientAI) -> None: response = client.agents.api_keys.with_raw_response.regenerate( api_key_uuid="api_key_uuid", agent_uuid="agent_uuid", @@ -267,7 +267,7 @@ def test_raw_response_regenerate(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_regenerate(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_regenerate(self, client: GradientAI) -> None: with client.agents.api_keys.with_streaming_response.regenerate( api_key_uuid="api_key_uuid", agent_uuid="agent_uuid", @@ -282,7 +282,7 @@ def test_streaming_response_regenerate(self, client: DigitaloceanGenaiSDK) -> No @pytest.mark.skip() @parametrize - def test_path_params_regenerate(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_regenerate(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): client.agents.api_keys.with_raw_response.regenerate( api_key_uuid="api_key_uuid", @@ -297,11 +297,13 @@ def test_path_params_regenerate(self, client: DigitaloceanGenaiSDK) -> None: class TestAsyncAPIKeys: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_create(self, async_client: AsyncGradientAI) -> None: api_key = await async_client.agents.api_keys.create( path_agent_uuid="agent_uuid", ) @@ -309,7 +311,7 @@ async def test_method_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, 
async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: api_key = await async_client.agents.api_keys.create( path_agent_uuid="agent_uuid", body_agent_uuid="agent_uuid", @@ -319,7 +321,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncDigitaloce @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.api_keys.with_raw_response.create( path_agent_uuid="agent_uuid", ) @@ -331,7 +333,7 @@ async def test_raw_response_create(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.api_keys.with_streaming_response.create( path_agent_uuid="agent_uuid", ) as response: @@ -345,7 +347,7 @@ async def test_streaming_response_create(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_create(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): await async_client.agents.api_keys.with_raw_response.create( path_agent_uuid="", @@ -353,7 +355,7 @@ async def test_path_params_create(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_update(self, async_client: AsyncGradientAI) -> None: api_key = await async_client.agents.api_keys.update( path_api_key_uuid="api_key_uuid", path_agent_uuid="agent_uuid", @@ -362,7 +364,7 @@ async def test_method_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: api_key = await async_client.agents.api_keys.update( path_api_key_uuid="api_key_uuid", path_agent_uuid="agent_uuid", @@ -374,7 +376,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncDigitaloce @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.api_keys.with_raw_response.update( path_api_key_uuid="api_key_uuid", path_agent_uuid="agent_uuid", @@ -387,7 +389,7 @@ async def test_raw_response_update(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.api_keys.with_streaming_response.update( path_api_key_uuid="api_key_uuid", path_agent_uuid="agent_uuid", @@ -402,7 +404,7 @@ async def test_streaming_response_update(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_update(self, 
async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): await async_client.agents.api_keys.with_raw_response.update( path_api_key_uuid="api_key_uuid", @@ -417,7 +419,7 @@ async def test_path_params_update(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list(self, async_client: AsyncGradientAI) -> None: api_key = await async_client.agents.api_keys.list( agent_uuid="agent_uuid", ) @@ -425,7 +427,7 @@ async def test_method_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> Non @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: api_key = await async_client.agents.api_keys.list( agent_uuid="agent_uuid", page=0, @@ -435,7 +437,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncDigitalocean @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.api_keys.with_raw_response.list( agent_uuid="agent_uuid", ) @@ -447,7 +449,7 @@ async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.api_keys.with_streaming_response.list( agent_uuid="agent_uuid", ) as response: @@ -461,7 +463,7 @@ async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGena @pytest.mark.skip() @parametrize - async def test_path_params_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_list(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): await async_client.agents.api_keys.with_raw_response.list( agent_uuid="", @@ -469,7 +471,7 @@ async def test_path_params_list(self, async_client: AsyncDigitaloceanGenaiSDK) - @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_delete(self, async_client: AsyncGradientAI) -> None: api_key = await async_client.agents.api_keys.delete( api_key_uuid="api_key_uuid", agent_uuid="agent_uuid", @@ -478,7 +480,7 @@ async def test_method_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.api_keys.with_raw_response.delete( api_key_uuid="api_key_uuid", agent_uuid="agent_uuid", @@ -491,7 +493,7 @@ async def test_raw_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_delete(self, 
async_client: AsyncGradientAI) -> None: async with async_client.agents.api_keys.with_streaming_response.delete( api_key_uuid="api_key_uuid", agent_uuid="agent_uuid", @@ -506,7 +508,7 @@ async def test_streaming_response_delete(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): await async_client.agents.api_keys.with_raw_response.delete( api_key_uuid="api_key_uuid", @@ -521,7 +523,7 @@ async def test_path_params_delete(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_method_regenerate(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_regenerate(self, async_client: AsyncGradientAI) -> None: api_key = await async_client.agents.api_keys.regenerate( api_key_uuid="api_key_uuid", agent_uuid="agent_uuid", @@ -530,7 +532,7 @@ async def test_method_regenerate(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_raw_response_regenerate(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_regenerate(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.api_keys.with_raw_response.regenerate( api_key_uuid="api_key_uuid", agent_uuid="agent_uuid", @@ -543,7 +545,7 @@ async def test_raw_response_regenerate(self, async_client: AsyncDigitaloceanGena @pytest.mark.skip() @parametrize - async def test_streaming_response_regenerate(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_regenerate(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.api_keys.with_streaming_response.regenerate( api_key_uuid="api_key_uuid", agent_uuid="agent_uuid", @@ -558,7 +560,7 @@ async def test_streaming_response_regenerate(self, async_client: AsyncDigitaloce @pytest.mark.skip() @parametrize - async def test_path_params_regenerate(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_regenerate(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): await async_client.agents.api_keys.with_raw_response.regenerate( api_key_uuid="api_key_uuid", diff --git a/tests/api_resources/agents/test_child_agents.py b/tests/api_resources/agents/test_child_agents.py index cfc8084e..daa7b10e 100644 --- a/tests/api_resources/agents/test_child_agents.py +++ b/tests/api_resources/agents/test_child_agents.py @@ -7,9 +7,9 @@ import pytest +from gradientai import GradientAI, AsyncGradientAI from tests.utils import assert_matches_type -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk.types.agents import ( +from gradientai.types.agents import ( ChildAgentAddResponse, ChildAgentViewResponse, ChildAgentDeleteResponse, @@ -24,7 +24,7 @@ class TestChildAgents: @pytest.mark.skip() @parametrize - def test_method_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update(self, client: GradientAI) -> None: child_agent = client.agents.child_agents.update( path_child_agent_uuid="child_agent_uuid", path_parent_agent_uuid="parent_agent_uuid", @@ -33,7 +33,7 @@ def test_method_update(self, client: DigitaloceanGenaiSDK) -> None: 
@pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update_with_all_params(self, client: GradientAI) -> None: child_agent = client.agents.child_agents.update( path_child_agent_uuid="child_agent_uuid", path_parent_agent_uuid="parent_agent_uuid", @@ -47,7 +47,7 @@ def test_method_update_with_all_params(self, client: DigitaloceanGenaiSDK) -> No @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_update(self, client: GradientAI) -> None: response = client.agents.child_agents.with_raw_response.update( path_child_agent_uuid="child_agent_uuid", path_parent_agent_uuid="parent_agent_uuid", @@ -60,7 +60,7 @@ def test_raw_response_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_update(self, client: GradientAI) -> None: with client.agents.child_agents.with_streaming_response.update( path_child_agent_uuid="child_agent_uuid", path_parent_agent_uuid="parent_agent_uuid", @@ -75,7 +75,7 @@ def test_streaming_response_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_update(self, client: GradientAI) -> None: with pytest.raises( ValueError, match=r"Expected a non-empty value for `path_parent_agent_uuid` but received ''" ): @@ -92,7 +92,7 @@ def test_path_params_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_delete(self, client: GradientAI) -> None: child_agent = client.agents.child_agents.delete( child_agent_uuid="child_agent_uuid", parent_agent_uuid="parent_agent_uuid", @@ -101,7 +101,7 @@ def test_method_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_delete(self, client: GradientAI) -> None: response = client.agents.child_agents.with_raw_response.delete( child_agent_uuid="child_agent_uuid", parent_agent_uuid="parent_agent_uuid", @@ -114,7 +114,7 @@ def test_raw_response_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_delete(self, client: GradientAI) -> None: with client.agents.child_agents.with_streaming_response.delete( child_agent_uuid="child_agent_uuid", parent_agent_uuid="parent_agent_uuid", @@ -129,7 +129,7 @@ def test_streaming_response_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_delete(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `parent_agent_uuid` but received ''"): client.agents.child_agents.with_raw_response.delete( child_agent_uuid="child_agent_uuid", @@ -144,7 +144,7 @@ def test_path_params_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_add(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_add(self, client: GradientAI) -> None: child_agent = client.agents.child_agents.add( 
path_child_agent_uuid="child_agent_uuid", path_parent_agent_uuid="parent_agent_uuid", @@ -153,7 +153,7 @@ def test_method_add(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_add_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_add_with_all_params(self, client: GradientAI) -> None: child_agent = client.agents.child_agents.add( path_child_agent_uuid="child_agent_uuid", path_parent_agent_uuid="parent_agent_uuid", @@ -166,7 +166,7 @@ def test_method_add_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_add(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_add(self, client: GradientAI) -> None: response = client.agents.child_agents.with_raw_response.add( path_child_agent_uuid="child_agent_uuid", path_parent_agent_uuid="parent_agent_uuid", @@ -179,7 +179,7 @@ def test_raw_response_add(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_add(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_add(self, client: GradientAI) -> None: with client.agents.child_agents.with_streaming_response.add( path_child_agent_uuid="child_agent_uuid", path_parent_agent_uuid="parent_agent_uuid", @@ -194,7 +194,7 @@ def test_streaming_response_add(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_add(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_add(self, client: GradientAI) -> None: with pytest.raises( ValueError, match=r"Expected a non-empty value for `path_parent_agent_uuid` but received ''" ): @@ -211,7 +211,7 @@ def test_path_params_add(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_view(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_view(self, client: GradientAI) -> None: child_agent = client.agents.child_agents.view( "uuid", ) @@ -219,7 +219,7 @@ def test_method_view(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_view(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_view(self, client: GradientAI) -> None: response = client.agents.child_agents.with_raw_response.view( "uuid", ) @@ -231,7 +231,7 @@ def test_raw_response_view(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_view(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_view(self, client: GradientAI) -> None: with client.agents.child_agents.with_streaming_response.view( "uuid", ) as response: @@ -245,7 +245,7 @@ def test_streaming_response_view(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_view(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_view(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): client.agents.child_agents.with_raw_response.view( "", @@ -253,11 +253,13 @@ def test_path_params_view(self, client: DigitaloceanGenaiSDK) -> None: class TestAsyncChildAgents: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @pytest.mark.skip() @parametrize - async def 
test_method_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_update(self, async_client: AsyncGradientAI) -> None: child_agent = await async_client.agents.child_agents.update( path_child_agent_uuid="child_agent_uuid", path_parent_agent_uuid="parent_agent_uuid", @@ -266,7 +268,7 @@ async def test_method_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: child_agent = await async_client.agents.child_agents.update( path_child_agent_uuid="child_agent_uuid", path_parent_agent_uuid="parent_agent_uuid", @@ -280,7 +282,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncDigitaloce @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.child_agents.with_raw_response.update( path_child_agent_uuid="child_agent_uuid", path_parent_agent_uuid="parent_agent_uuid", @@ -293,7 +295,7 @@ async def test_raw_response_update(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.child_agents.with_streaming_response.update( path_child_agent_uuid="child_agent_uuid", path_parent_agent_uuid="parent_agent_uuid", @@ -308,7 +310,7 @@ async def test_streaming_response_update(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: with pytest.raises( ValueError, match=r"Expected a non-empty value for `path_parent_agent_uuid` but received ''" ): @@ -325,7 +327,7 @@ async def test_path_params_update(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_delete(self, async_client: AsyncGradientAI) -> None: child_agent = await async_client.agents.child_agents.delete( child_agent_uuid="child_agent_uuid", parent_agent_uuid="parent_agent_uuid", @@ -334,7 +336,7 @@ async def test_method_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.child_agents.with_raw_response.delete( child_agent_uuid="child_agent_uuid", parent_agent_uuid="parent_agent_uuid", @@ -347,7 +349,7 @@ async def test_raw_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.child_agents.with_streaming_response.delete( child_agent_uuid="child_agent_uuid", parent_agent_uuid="parent_agent_uuid", @@ -362,7 +364,7 @@ async 
def test_streaming_response_delete(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `parent_agent_uuid` but received ''"): await async_client.agents.child_agents.with_raw_response.delete( child_agent_uuid="child_agent_uuid", @@ -377,7 +379,7 @@ async def test_path_params_delete(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_method_add(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_add(self, async_client: AsyncGradientAI) -> None: child_agent = await async_client.agents.child_agents.add( path_child_agent_uuid="child_agent_uuid", path_parent_agent_uuid="parent_agent_uuid", @@ -386,7 +388,7 @@ async def test_method_add(self, async_client: AsyncDigitaloceanGenaiSDK) -> None @pytest.mark.skip() @parametrize - async def test_method_add_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_add_with_all_params(self, async_client: AsyncGradientAI) -> None: child_agent = await async_client.agents.child_agents.add( path_child_agent_uuid="child_agent_uuid", path_parent_agent_uuid="parent_agent_uuid", @@ -399,7 +401,7 @@ async def test_method_add_with_all_params(self, async_client: AsyncDigitaloceanG @pytest.mark.skip() @parametrize - async def test_raw_response_add(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.child_agents.with_raw_response.add( path_child_agent_uuid="child_agent_uuid", path_parent_agent_uuid="parent_agent_uuid", @@ -412,7 +414,7 @@ async def test_raw_response_add(self, async_client: AsyncDigitaloceanGenaiSDK) - @pytest.mark.skip() @parametrize - async def test_streaming_response_add(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.child_agents.with_streaming_response.add( path_child_agent_uuid="child_agent_uuid", path_parent_agent_uuid="parent_agent_uuid", @@ -427,7 +429,7 @@ async def test_streaming_response_add(self, async_client: AsyncDigitaloceanGenai @pytest.mark.skip() @parametrize - async def test_path_params_add(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_add(self, async_client: AsyncGradientAI) -> None: with pytest.raises( ValueError, match=r"Expected a non-empty value for `path_parent_agent_uuid` but received ''" ): @@ -444,7 +446,7 @@ async def test_path_params_add(self, async_client: AsyncDigitaloceanGenaiSDK) -> @pytest.mark.skip() @parametrize - async def test_method_view(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_view(self, async_client: AsyncGradientAI) -> None: child_agent = await async_client.agents.child_agents.view( "uuid", ) @@ -452,7 +454,7 @@ async def test_method_view(self, async_client: AsyncDigitaloceanGenaiSDK) -> Non @pytest.mark.skip() @parametrize - async def test_raw_response_view(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_view(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.child_agents.with_raw_response.view( "uuid", ) @@ -464,7 +466,7 @@ async def 
test_raw_response_view(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_streaming_response_view(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_view(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.child_agents.with_streaming_response.view( "uuid", ) as response: @@ -478,7 +480,7 @@ async def test_streaming_response_view(self, async_client: AsyncDigitaloceanGena @pytest.mark.skip() @parametrize - async def test_path_params_view(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_view(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): await async_client.agents.child_agents.with_raw_response.view( "", diff --git a/tests/api_resources/agents/test_functions.py b/tests/api_resources/agents/test_functions.py index d66590ba..5a3693cb 100644 --- a/tests/api_resources/agents/test_functions.py +++ b/tests/api_resources/agents/test_functions.py @@ -7,9 +7,9 @@ import pytest +from gradientai import GradientAI, AsyncGradientAI from tests.utils import assert_matches_type -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk.types.agents import ( +from gradientai.types.agents import ( FunctionCreateResponse, FunctionDeleteResponse, FunctionUpdateResponse, @@ -23,7 +23,7 @@ class TestFunctions: @pytest.mark.skip() @parametrize - def test_method_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_create(self, client: GradientAI) -> None: function = client.agents.functions.create( path_agent_uuid="agent_uuid", ) @@ -31,7 +31,7 @@ def test_method_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_create_with_all_params(self, client: GradientAI) -> None: function = client.agents.functions.create( path_agent_uuid="agent_uuid", body_agent_uuid="agent_uuid", @@ -46,7 +46,7 @@ def test_method_create_with_all_params(self, client: DigitaloceanGenaiSDK) -> No @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_create(self, client: GradientAI) -> None: response = client.agents.functions.with_raw_response.create( path_agent_uuid="agent_uuid", ) @@ -58,7 +58,7 @@ def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_create(self, client: GradientAI) -> None: with client.agents.functions.with_streaming_response.create( path_agent_uuid="agent_uuid", ) as response: @@ -72,7 +72,7 @@ def test_streaming_response_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_create(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): client.agents.functions.with_raw_response.create( path_agent_uuid="", @@ -80,7 +80,7 @@ def test_path_params_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update(self, 
client: GradientAI) -> None: function = client.agents.functions.update( path_function_uuid="function_uuid", path_agent_uuid="agent_uuid", @@ -89,7 +89,7 @@ def test_method_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update_with_all_params(self, client: GradientAI) -> None: function = client.agents.functions.update( path_function_uuid="function_uuid", path_agent_uuid="agent_uuid", @@ -106,7 +106,7 @@ def test_method_update_with_all_params(self, client: DigitaloceanGenaiSDK) -> No @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_update(self, client: GradientAI) -> None: response = client.agents.functions.with_raw_response.update( path_function_uuid="function_uuid", path_agent_uuid="agent_uuid", @@ -119,7 +119,7 @@ def test_raw_response_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_update(self, client: GradientAI) -> None: with client.agents.functions.with_streaming_response.update( path_function_uuid="function_uuid", path_agent_uuid="agent_uuid", @@ -134,7 +134,7 @@ def test_streaming_response_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_update(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): client.agents.functions.with_raw_response.update( path_function_uuid="function_uuid", @@ -149,7 +149,7 @@ def test_path_params_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_delete(self, client: GradientAI) -> None: function = client.agents.functions.delete( function_uuid="function_uuid", agent_uuid="agent_uuid", @@ -158,7 +158,7 @@ def test_method_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_delete(self, client: GradientAI) -> None: response = client.agents.functions.with_raw_response.delete( function_uuid="function_uuid", agent_uuid="agent_uuid", @@ -171,7 +171,7 @@ def test_raw_response_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_delete(self, client: GradientAI) -> None: with client.agents.functions.with_streaming_response.delete( function_uuid="function_uuid", agent_uuid="agent_uuid", @@ -186,7 +186,7 @@ def test_streaming_response_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_delete(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): client.agents.functions.with_raw_response.delete( function_uuid="function_uuid", @@ -201,11 +201,13 @@ def test_path_params_delete(self, client: DigitaloceanGenaiSDK) -> None: class TestAsyncFunctions: - parametrize = 
pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_create(self, async_client: AsyncGradientAI) -> None: function = await async_client.agents.functions.create( path_agent_uuid="agent_uuid", ) @@ -213,7 +215,7 @@ async def test_method_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: function = await async_client.agents.functions.create( path_agent_uuid="agent_uuid", body_agent_uuid="agent_uuid", @@ -228,7 +230,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncDigitaloce @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.functions.with_raw_response.create( path_agent_uuid="agent_uuid", ) @@ -240,7 +242,7 @@ async def test_raw_response_create(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.functions.with_streaming_response.create( path_agent_uuid="agent_uuid", ) as response: @@ -254,7 +256,7 @@ async def test_streaming_response_create(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_create(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): await async_client.agents.functions.with_raw_response.create( path_agent_uuid="", @@ -262,7 +264,7 @@ async def test_path_params_create(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_update(self, async_client: AsyncGradientAI) -> None: function = await async_client.agents.functions.update( path_function_uuid="function_uuid", path_agent_uuid="agent_uuid", @@ -271,7 +273,7 @@ async def test_method_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: function = await async_client.agents.functions.update( path_function_uuid="function_uuid", path_agent_uuid="agent_uuid", @@ -288,7 +290,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncDigitaloce @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: response = await 
async_client.agents.functions.with_raw_response.update( path_function_uuid="function_uuid", path_agent_uuid="agent_uuid", @@ -301,7 +303,7 @@ async def test_raw_response_update(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.functions.with_streaming_response.update( path_function_uuid="function_uuid", path_agent_uuid="agent_uuid", @@ -316,7 +318,7 @@ async def test_streaming_response_update(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): await async_client.agents.functions.with_raw_response.update( path_function_uuid="function_uuid", @@ -331,7 +333,7 @@ async def test_path_params_update(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_delete(self, async_client: AsyncGradientAI) -> None: function = await async_client.agents.functions.delete( function_uuid="function_uuid", agent_uuid="agent_uuid", @@ -340,7 +342,7 @@ async def test_method_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.functions.with_raw_response.delete( function_uuid="function_uuid", agent_uuid="agent_uuid", @@ -353,7 +355,7 @@ async def test_raw_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.functions.with_streaming_response.delete( function_uuid="function_uuid", agent_uuid="agent_uuid", @@ -368,7 +370,7 @@ async def test_streaming_response_delete(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): await async_client.agents.functions.with_raw_response.delete( function_uuid="function_uuid", diff --git a/tests/api_resources/agents/test_knowledge_bases.py b/tests/api_resources/agents/test_knowledge_bases.py index b313b1af..e62c05ff 100644 --- a/tests/api_resources/agents/test_knowledge_bases.py +++ b/tests/api_resources/agents/test_knowledge_bases.py @@ -7,9 +7,9 @@ import pytest +from gradientai import GradientAI, AsyncGradientAI from tests.utils import assert_matches_type -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk.types.agents import APILinkKnowledgeBaseOutput, KnowledgeBaseDetachResponse +from gradientai.types.agents import APILinkKnowledgeBaseOutput, 
KnowledgeBaseDetachResponse base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -19,7 +19,7 @@ class TestKnowledgeBases: @pytest.mark.skip() @parametrize - def test_method_attach(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_attach(self, client: GradientAI) -> None: knowledge_base = client.agents.knowledge_bases.attach( "agent_uuid", ) @@ -27,7 +27,7 @@ def test_method_attach(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_attach(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_attach(self, client: GradientAI) -> None: response = client.agents.knowledge_bases.with_raw_response.attach( "agent_uuid", ) @@ -39,7 +39,7 @@ def test_raw_response_attach(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_attach(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_attach(self, client: GradientAI) -> None: with client.agents.knowledge_bases.with_streaming_response.attach( "agent_uuid", ) as response: @@ -53,7 +53,7 @@ def test_streaming_response_attach(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_attach(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_attach(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): client.agents.knowledge_bases.with_raw_response.attach( "", @@ -61,7 +61,7 @@ def test_path_params_attach(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_attach_single(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_attach_single(self, client: GradientAI) -> None: knowledge_base = client.agents.knowledge_bases.attach_single( knowledge_base_uuid="knowledge_base_uuid", agent_uuid="agent_uuid", @@ -70,7 +70,7 @@ def test_method_attach_single(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_attach_single(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_attach_single(self, client: GradientAI) -> None: response = client.agents.knowledge_bases.with_raw_response.attach_single( knowledge_base_uuid="knowledge_base_uuid", agent_uuid="agent_uuid", @@ -83,7 +83,7 @@ def test_raw_response_attach_single(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_attach_single(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_attach_single(self, client: GradientAI) -> None: with client.agents.knowledge_bases.with_streaming_response.attach_single( knowledge_base_uuid="knowledge_base_uuid", agent_uuid="agent_uuid", @@ -98,7 +98,7 @@ def test_streaming_response_attach_single(self, client: DigitaloceanGenaiSDK) -> @pytest.mark.skip() @parametrize - def test_path_params_attach_single(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_attach_single(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): client.agents.knowledge_bases.with_raw_response.attach_single( knowledge_base_uuid="knowledge_base_uuid", @@ -113,7 +113,7 @@ def test_path_params_attach_single(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_detach(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_detach(self, client: GradientAI) -> None: 
knowledge_base = client.agents.knowledge_bases.detach( knowledge_base_uuid="knowledge_base_uuid", agent_uuid="agent_uuid", @@ -122,7 +122,7 @@ def test_method_detach(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_detach(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_detach(self, client: GradientAI) -> None: response = client.agents.knowledge_bases.with_raw_response.detach( knowledge_base_uuid="knowledge_base_uuid", agent_uuid="agent_uuid", @@ -135,7 +135,7 @@ def test_raw_response_detach(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_detach(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_detach(self, client: GradientAI) -> None: with client.agents.knowledge_bases.with_streaming_response.detach( knowledge_base_uuid="knowledge_base_uuid", agent_uuid="agent_uuid", @@ -150,7 +150,7 @@ def test_streaming_response_detach(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_detach(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_detach(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): client.agents.knowledge_bases.with_raw_response.detach( knowledge_base_uuid="knowledge_base_uuid", @@ -165,11 +165,13 @@ def test_path_params_detach(self, client: DigitaloceanGenaiSDK) -> None: class TestAsyncKnowledgeBases: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @pytest.mark.skip() @parametrize - async def test_method_attach(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_attach(self, async_client: AsyncGradientAI) -> None: knowledge_base = await async_client.agents.knowledge_bases.attach( "agent_uuid", ) @@ -177,7 +179,7 @@ async def test_method_attach(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_raw_response_attach(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_attach(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.knowledge_bases.with_raw_response.attach( "agent_uuid", ) @@ -189,7 +191,7 @@ async def test_raw_response_attach(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_attach(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_attach(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.knowledge_bases.with_streaming_response.attach( "agent_uuid", ) as response: @@ -203,7 +205,7 @@ async def test_streaming_response_attach(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_attach(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_attach(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): await async_client.agents.knowledge_bases.with_raw_response.attach( "", @@ -211,7 +213,7 @@ async def test_path_params_attach(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def 
test_method_attach_single(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_attach_single(self, async_client: AsyncGradientAI) -> None: knowledge_base = await async_client.agents.knowledge_bases.attach_single( knowledge_base_uuid="knowledge_base_uuid", agent_uuid="agent_uuid", @@ -220,7 +222,7 @@ async def test_method_attach_single(self, async_client: AsyncDigitaloceanGenaiSD @pytest.mark.skip() @parametrize - async def test_raw_response_attach_single(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_attach_single(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.knowledge_bases.with_raw_response.attach_single( knowledge_base_uuid="knowledge_base_uuid", agent_uuid="agent_uuid", @@ -233,7 +235,7 @@ async def test_raw_response_attach_single(self, async_client: AsyncDigitaloceanG @pytest.mark.skip() @parametrize - async def test_streaming_response_attach_single(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_attach_single(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.knowledge_bases.with_streaming_response.attach_single( knowledge_base_uuid="knowledge_base_uuid", agent_uuid="agent_uuid", @@ -248,7 +250,7 @@ async def test_streaming_response_attach_single(self, async_client: AsyncDigital @pytest.mark.skip() @parametrize - async def test_path_params_attach_single(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_attach_single(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): await async_client.agents.knowledge_bases.with_raw_response.attach_single( knowledge_base_uuid="knowledge_base_uuid", @@ -263,7 +265,7 @@ async def test_path_params_attach_single(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_method_detach(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_detach(self, async_client: AsyncGradientAI) -> None: knowledge_base = await async_client.agents.knowledge_bases.detach( knowledge_base_uuid="knowledge_base_uuid", agent_uuid="agent_uuid", @@ -272,7 +274,7 @@ async def test_method_detach(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_raw_response_detach(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_detach(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.knowledge_bases.with_raw_response.detach( knowledge_base_uuid="knowledge_base_uuid", agent_uuid="agent_uuid", @@ -285,7 +287,7 @@ async def test_raw_response_detach(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_detach(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_detach(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.knowledge_bases.with_streaming_response.detach( knowledge_base_uuid="knowledge_base_uuid", agent_uuid="agent_uuid", @@ -300,7 +302,7 @@ async def test_streaming_response_detach(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_detach(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_detach(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for 
`agent_uuid` but received ''"): await async_client.agents.knowledge_bases.with_raw_response.detach( knowledge_base_uuid="knowledge_base_uuid", diff --git a/tests/api_resources/agents/test_versions.py b/tests/api_resources/agents/test_versions.py index 94f02d8c..79f73672 100644 --- a/tests/api_resources/agents/test_versions.py +++ b/tests/api_resources/agents/test_versions.py @@ -7,9 +7,9 @@ import pytest +from gradientai import GradientAI, AsyncGradientAI from tests.utils import assert_matches_type -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk.types.agents import ( +from gradientai.types.agents import ( VersionListResponse, VersionUpdateResponse, ) @@ -22,7 +22,7 @@ class TestVersions: @pytest.mark.skip() @parametrize - def test_method_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update(self, client: GradientAI) -> None: version = client.agents.versions.update( path_uuid="uuid", ) @@ -30,7 +30,7 @@ def test_method_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update_with_all_params(self, client: GradientAI) -> None: version = client.agents.versions.update( path_uuid="uuid", body_uuid="uuid", @@ -40,7 +40,7 @@ def test_method_update_with_all_params(self, client: DigitaloceanGenaiSDK) -> No @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_update(self, client: GradientAI) -> None: response = client.agents.versions.with_raw_response.update( path_uuid="uuid", ) @@ -52,7 +52,7 @@ def test_raw_response_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_update(self, client: GradientAI) -> None: with client.agents.versions.with_streaming_response.update( path_uuid="uuid", ) as response: @@ -66,7 +66,7 @@ def test_streaming_response_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_update(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): client.agents.versions.with_raw_response.update( path_uuid="", @@ -74,7 +74,7 @@ def test_path_params_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list(self, client: GradientAI) -> None: version = client.agents.versions.list( uuid="uuid", ) @@ -82,7 +82,7 @@ def test_method_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list_with_all_params(self, client: GradientAI) -> None: version = client.agents.versions.list( uuid="uuid", page=0, @@ -92,7 +92,7 @@ def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_list(self, client: GradientAI) -> None: response = client.agents.versions.with_raw_response.list( uuid="uuid", ) @@ -104,7 +104,7 @@ def test_raw_response_list(self, client: 
DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_list(self, client: GradientAI) -> None: with client.agents.versions.with_streaming_response.list( uuid="uuid", ) as response: @@ -118,7 +118,7 @@ def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_list(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): client.agents.versions.with_raw_response.list( uuid="", @@ -126,11 +126,13 @@ def test_path_params_list(self, client: DigitaloceanGenaiSDK) -> None: class TestAsyncVersions: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_update(self, async_client: AsyncGradientAI) -> None: version = await async_client.agents.versions.update( path_uuid="uuid", ) @@ -138,7 +140,7 @@ async def test_method_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: version = await async_client.agents.versions.update( path_uuid="uuid", body_uuid="uuid", @@ -148,7 +150,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncDigitaloce @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.versions.with_raw_response.update( path_uuid="uuid", ) @@ -160,7 +162,7 @@ async def test_raw_response_update(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.versions.with_streaming_response.update( path_uuid="uuid", ) as response: @@ -174,7 +176,7 @@ async def test_streaming_response_update(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): await async_client.agents.versions.with_raw_response.update( path_uuid="", @@ -182,7 +184,7 @@ async def test_path_params_update(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list(self, async_client: AsyncGradientAI) -> None: version = await async_client.agents.versions.list( uuid="uuid", ) @@ -190,7 +192,7 @@ async def test_method_list(self, async_client: 
AsyncDigitaloceanGenaiSDK) -> Non @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: version = await async_client.agents.versions.list( uuid="uuid", page=0, @@ -200,7 +202,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncDigitalocean @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.versions.with_raw_response.list( uuid="uuid", ) @@ -212,7 +214,7 @@ async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.versions.with_streaming_response.list( uuid="uuid", ) as response: @@ -226,7 +228,7 @@ async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGena @pytest.mark.skip() @parametrize - async def test_path_params_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_list(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): await async_client.agents.versions.with_raw_response.list( uuid="", diff --git a/tests/api_resources/auth/agents/test_token.py b/tests/api_resources/auth/agents/test_token.py deleted file mode 100644 index 1e505ccd..00000000 --- a/tests/api_resources/auth/agents/test_token.py +++ /dev/null @@ -1,124 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -from __future__ import annotations - -import os -from typing import Any, cast - -import pytest - -from tests.utils import assert_matches_type -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk.types.auth.agents import TokenCreateResponse - -base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") - - -class TestToken: - parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - - @pytest.mark.skip() - @parametrize - def test_method_create(self, client: DigitaloceanGenaiSDK) -> None: - token = client.auth.agents.token.create( - path_agent_uuid="agent_uuid", - ) - assert_matches_type(TokenCreateResponse, token, path=["response"]) - - @pytest.mark.skip() - @parametrize - def test_method_create_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: - token = client.auth.agents.token.create( - path_agent_uuid="agent_uuid", - body_agent_uuid="agent_uuid", - ) - assert_matches_type(TokenCreateResponse, token, path=["response"]) - - @pytest.mark.skip() - @parametrize - def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: - response = client.auth.agents.token.with_raw_response.create( - path_agent_uuid="agent_uuid", - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - token = response.parse() - assert_matches_type(TokenCreateResponse, token, path=["response"]) - - @pytest.mark.skip() - @parametrize - def test_streaming_response_create(self, client: DigitaloceanGenaiSDK) -> None: - with client.auth.agents.token.with_streaming_response.create( - path_agent_uuid="agent_uuid", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - token = response.parse() - assert_matches_type(TokenCreateResponse, token, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @pytest.mark.skip() - @parametrize - def test_path_params_create(self, client: DigitaloceanGenaiSDK) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): - client.auth.agents.token.with_raw_response.create( - path_agent_uuid="", - ) - - -class TestAsyncToken: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) - - @pytest.mark.skip() - @parametrize - async def test_method_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - token = await async_client.auth.agents.token.create( - path_agent_uuid="agent_uuid", - ) - assert_matches_type(TokenCreateResponse, token, path=["response"]) - - @pytest.mark.skip() - @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - token = await async_client.auth.agents.token.create( - path_agent_uuid="agent_uuid", - body_agent_uuid="agent_uuid", - ) - assert_matches_type(TokenCreateResponse, token, path=["response"]) - - @pytest.mark.skip() - @parametrize - async def test_raw_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - response = await async_client.auth.agents.token.with_raw_response.create( - path_agent_uuid="agent_uuid", - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - token = await response.parse() - assert_matches_type(TokenCreateResponse, token, path=["response"]) - - @pytest.mark.skip() - @parametrize - async def 
test_streaming_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - async with async_client.auth.agents.token.with_streaming_response.create( - path_agent_uuid="agent_uuid", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - token = await response.parse() - assert_matches_type(TokenCreateResponse, token, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @pytest.mark.skip() - @parametrize - async def test_path_params_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): - await async_client.auth.agents.token.with_raw_response.create( - path_agent_uuid="", - ) diff --git a/tests/api_resources/api_keys/__init__.py b/tests/api_resources/chat/__init__.py similarity index 100% rename from tests/api_resources/api_keys/__init__.py rename to tests/api_resources/chat/__init__.py diff --git a/tests/api_resources/test_chat.py b/tests/api_resources/chat/test_completions.py similarity index 61% rename from tests/api_resources/test_chat.py rename to tests/api_resources/chat/test_completions.py index 0bf48414..b4c09579 100644 --- a/tests/api_resources/test_chat.py +++ b/tests/api_resources/chat/test_completions.py @@ -7,20 +7,20 @@ import pytest +from gradientai import GradientAI, AsyncGradientAI from tests.utils import assert_matches_type -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk.types import ChatCreateCompletionResponse +from gradientai.types.chat import CompletionCreateResponse base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") -class TestChat: +class TestCompletions: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) @pytest.mark.skip() @parametrize - def test_method_create_completion(self, client: DigitaloceanGenaiSDK) -> None: - chat = client.chat.create_completion( + def test_method_create(self, client: GradientAI) -> None: + completion = client.chat.completions.create( messages=[ { "content": "string", @@ -29,12 +29,12 @@ def test_method_create_completion(self, client: DigitaloceanGenaiSDK) -> None: ], model="llama3-8b-instruct", ) - assert_matches_type(ChatCreateCompletionResponse, chat, path=["response"]) + assert_matches_type(CompletionCreateResponse, completion, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_create_completion_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: - chat = client.chat.create_completion( + def test_method_create_with_all_params(self, client: GradientAI) -> None: + completion = client.chat.completions.create( messages=[ { "content": "string", @@ -58,12 +58,12 @@ def test_method_create_completion_with_all_params(self, client: DigitaloceanGena top_p=1, user="user-1234", ) - assert_matches_type(ChatCreateCompletionResponse, chat, path=["response"]) + assert_matches_type(CompletionCreateResponse, completion, path=["response"]) @pytest.mark.skip() @parametrize - def test_raw_response_create_completion(self, client: DigitaloceanGenaiSDK) -> None: - response = client.chat.with_raw_response.create_completion( + def test_raw_response_create(self, client: GradientAI) -> None: + response = client.chat.completions.with_raw_response.create( messages=[ { "content": "string", @@ -75,13 +75,13 @@ def test_raw_response_create_completion(self, client: DigitaloceanGenaiSDK) -> N 
assert response.is_closed is True assert response.http_request.headers.get("X-Stainless-Lang") == "python" - chat = response.parse() - assert_matches_type(ChatCreateCompletionResponse, chat, path=["response"]) + completion = response.parse() + assert_matches_type(CompletionCreateResponse, completion, path=["response"]) @pytest.mark.skip() @parametrize - def test_streaming_response_create_completion(self, client: DigitaloceanGenaiSDK) -> None: - with client.chat.with_streaming_response.create_completion( + def test_streaming_response_create(self, client: GradientAI) -> None: + with client.chat.completions.with_streaming_response.create( messages=[ { "content": "string", @@ -93,19 +93,21 @@ def test_streaming_response_create_completion(self, client: DigitaloceanGenaiSDK assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" - chat = response.parse() - assert_matches_type(ChatCreateCompletionResponse, chat, path=["response"]) + completion = response.parse() + assert_matches_type(CompletionCreateResponse, completion, path=["response"]) assert cast(Any, response.is_closed) is True -class TestAsyncChat: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) +class TestAsyncCompletions: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @pytest.mark.skip() @parametrize - async def test_method_create_completion(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - chat = await async_client.chat.create_completion( + async def test_method_create(self, async_client: AsyncGradientAI) -> None: + completion = await async_client.chat.completions.create( messages=[ { "content": "string", @@ -114,12 +116,12 @@ async def test_method_create_completion(self, async_client: AsyncDigitaloceanGen ], model="llama3-8b-instruct", ) - assert_matches_type(ChatCreateCompletionResponse, chat, path=["response"]) + assert_matches_type(CompletionCreateResponse, completion, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_completion_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - chat = await async_client.chat.create_completion( + async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + completion = await async_client.chat.completions.create( messages=[ { "content": "string", @@ -143,12 +145,12 @@ async def test_method_create_completion_with_all_params(self, async_client: Asyn top_p=1, user="user-1234", ) - assert_matches_type(ChatCreateCompletionResponse, chat, path=["response"]) + assert_matches_type(CompletionCreateResponse, completion, path=["response"]) @pytest.mark.skip() @parametrize - async def test_raw_response_create_completion(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - response = await async_client.chat.with_raw_response.create_completion( + async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + response = await async_client.chat.completions.with_raw_response.create( messages=[ { "content": "string", @@ -160,13 +162,13 @@ async def test_raw_response_create_completion(self, async_client: AsyncDigitaloc assert response.is_closed is True assert response.http_request.headers.get("X-Stainless-Lang") == "python" - chat = await response.parse() - assert_matches_type(ChatCreateCompletionResponse, chat, path=["response"]) + completion = await response.parse() + 
assert_matches_type(CompletionCreateResponse, completion, path=["response"]) @pytest.mark.skip() @parametrize - async def test_streaming_response_create_completion(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - async with async_client.chat.with_streaming_response.create_completion( + async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async with async_client.chat.completions.with_streaming_response.create( messages=[ { "content": "string", @@ -178,7 +180,7 @@ async def test_streaming_response_create_completion(self, async_client: AsyncDig assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" - chat = await response.parse() - assert_matches_type(ChatCreateCompletionResponse, chat, path=["response"]) + completion = await response.parse() + assert_matches_type(CompletionCreateResponse, completion, path=["response"]) assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/auth/__init__.py b/tests/api_resources/inference/__init__.py similarity index 100% rename from tests/api_resources/auth/__init__.py rename to tests/api_resources/inference/__init__.py diff --git a/tests/api_resources/api_keys/test_api_keys_.py b/tests/api_resources/inference/test_api_keys.py similarity index 67% rename from tests/api_resources/api_keys/test_api_keys_.py rename to tests/api_resources/inference/test_api_keys.py index 0ae74d6b..90bf95b9 100644 --- a/tests/api_resources/api_keys/test_api_keys_.py +++ b/tests/api_resources/inference/test_api_keys.py @@ -7,9 +7,9 @@ import pytest +from gradientai import GradientAI, AsyncGradientAI from tests.utils import assert_matches_type -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk.types.api_keys import ( +from gradientai.types.inference import ( APIKeyListResponse, APIKeyCreateResponse, APIKeyDeleteResponse, @@ -25,22 +25,22 @@ class TestAPIKeys: @pytest.mark.skip() @parametrize - def test_method_create(self, client: DigitaloceanGenaiSDK) -> None: - api_key = client.api_keys.api_keys.create() + def test_method_create(self, client: GradientAI) -> None: + api_key = client.inference.api_keys.create() assert_matches_type(APIKeyCreateResponse, api_key, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: - api_key = client.api_keys.api_keys.create( + def test_method_create_with_all_params(self, client: GradientAI) -> None: + api_key = client.inference.api_keys.create( name="name", ) assert_matches_type(APIKeyCreateResponse, api_key, path=["response"]) @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: - response = client.api_keys.api_keys.with_raw_response.create() + def test_raw_response_create(self, client: GradientAI) -> None: + response = client.inference.api_keys.with_raw_response.create() assert response.is_closed is True assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -49,8 +49,8 @@ def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: DigitaloceanGenaiSDK) -> None: - with client.api_keys.api_keys.with_streaming_response.create() as response: + def test_streaming_response_create(self, client: GradientAI) -> None: + with client.inference.api_keys.with_streaming_response.create() as response: assert not response.is_closed 
assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -61,16 +61,16 @@ def test_streaming_response_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: DigitaloceanGenaiSDK) -> None: - api_key = client.api_keys.api_keys.update( + def test_method_update(self, client: GradientAI) -> None: + api_key = client.inference.api_keys.update( path_api_key_uuid="api_key_uuid", ) assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: - api_key = client.api_keys.api_keys.update( + def test_method_update_with_all_params(self, client: GradientAI) -> None: + api_key = client.inference.api_keys.update( path_api_key_uuid="api_key_uuid", body_api_key_uuid="api_key_uuid", name="name", @@ -79,8 +79,8 @@ def test_method_update_with_all_params(self, client: DigitaloceanGenaiSDK) -> No @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: DigitaloceanGenaiSDK) -> None: - response = client.api_keys.api_keys.with_raw_response.update( + def test_raw_response_update(self, client: GradientAI) -> None: + response = client.inference.api_keys.with_raw_response.update( path_api_key_uuid="api_key_uuid", ) @@ -91,8 +91,8 @@ def test_raw_response_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: DigitaloceanGenaiSDK) -> None: - with client.api_keys.api_keys.with_streaming_response.update( + def test_streaming_response_update(self, client: GradientAI) -> None: + with client.inference.api_keys.with_streaming_response.update( path_api_key_uuid="api_key_uuid", ) as response: assert not response.is_closed @@ -105,22 +105,22 @@ def test_streaming_response_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_update(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): - client.api_keys.api_keys.with_raw_response.update( + client.inference.api_keys.with_raw_response.update( path_api_key_uuid="", ) @pytest.mark.skip() @parametrize - def test_method_list(self, client: DigitaloceanGenaiSDK) -> None: - api_key = client.api_keys.api_keys.list() + def test_method_list(self, client: GradientAI) -> None: + api_key = client.inference.api_keys.list() assert_matches_type(APIKeyListResponse, api_key, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: - api_key = client.api_keys.api_keys.list( + def test_method_list_with_all_params(self, client: GradientAI) -> None: + api_key = client.inference.api_keys.list( page=0, per_page=0, ) @@ -128,8 +128,8 @@ def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: - response = client.api_keys.api_keys.with_raw_response.list() + def test_raw_response_list(self, client: GradientAI) -> None: + response = client.inference.api_keys.with_raw_response.list() assert response.is_closed is True assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -138,8 +138,8 @@ def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: 
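
As the renamed chat tests show, completion creation moves from client.chat.create_completion(...) to an OpenAI-style client.chat.completions.create(...). A short usage sketch built only from arguments these tests exercise (the message role and content below are illustrative placeholders, and the client is assumed to pick up credentials from the environment):

    from gradientai import GradientAI

    client = GradientAI()

    completion = client.chat.completions.create(
        messages=[{"role": "user", "content": "string"}],  # placeholder message
        model="llama3-8b-instruct",
    )
    print(completion)
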
@pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: - with client.api_keys.api_keys.with_streaming_response.list() as response: + def test_streaming_response_list(self, client: GradientAI) -> None: + with client.inference.api_keys.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -150,16 +150,16 @@ def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: DigitaloceanGenaiSDK) -> None: - api_key = client.api_keys.api_keys.delete( + def test_method_delete(self, client: GradientAI) -> None: + api_key = client.inference.api_keys.delete( "api_key_uuid", ) assert_matches_type(APIKeyDeleteResponse, api_key, path=["response"]) @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: DigitaloceanGenaiSDK) -> None: - response = client.api_keys.api_keys.with_raw_response.delete( + def test_raw_response_delete(self, client: GradientAI) -> None: + response = client.inference.api_keys.with_raw_response.delete( "api_key_uuid", ) @@ -170,8 +170,8 @@ def test_raw_response_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: DigitaloceanGenaiSDK) -> None: - with client.api_keys.api_keys.with_streaming_response.delete( + def test_streaming_response_delete(self, client: GradientAI) -> None: + with client.inference.api_keys.with_streaming_response.delete( "api_key_uuid", ) as response: assert not response.is_closed @@ -184,24 +184,24 @@ def test_streaming_response_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_delete(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): - client.api_keys.api_keys.with_raw_response.delete( + client.inference.api_keys.with_raw_response.delete( "", ) @pytest.mark.skip() @parametrize - def test_method_update_regenerate(self, client: DigitaloceanGenaiSDK) -> None: - api_key = client.api_keys.api_keys.update_regenerate( + def test_method_update_regenerate(self, client: GradientAI) -> None: + api_key = client.inference.api_keys.update_regenerate( "api_key_uuid", ) assert_matches_type(APIKeyUpdateRegenerateResponse, api_key, path=["response"]) @pytest.mark.skip() @parametrize - def test_raw_response_update_regenerate(self, client: DigitaloceanGenaiSDK) -> None: - response = client.api_keys.api_keys.with_raw_response.update_regenerate( + def test_raw_response_update_regenerate(self, client: GradientAI) -> None: + response = client.inference.api_keys.with_raw_response.update_regenerate( "api_key_uuid", ) @@ -212,8 +212,8 @@ def test_raw_response_update_regenerate(self, client: DigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - def test_streaming_response_update_regenerate(self, client: DigitaloceanGenaiSDK) -> None: - with client.api_keys.api_keys.with_streaming_response.update_regenerate( + def test_streaming_response_update_regenerate(self, client: GradientAI) -> None: + with client.inference.api_keys.with_streaming_response.update_regenerate( "api_key_uuid", ) as response: assert not response.is_closed @@ -226,34 +226,36 @@ def test_streaming_response_update_regenerate(self, client: DigitaloceanGenaiSDK 
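
The API-key management tests move from client.api_keys.api_keys to client.inference.api_keys. A sketch of the relocated surface, limited to the method names and arguments exercised in these tests (the UUID and name values are placeholders):

    from gradientai import GradientAI

    client = GradientAI()

    created = client.inference.api_keys.create(name="name")
    listed = client.inference.api_keys.list(page=0, per_page=0)
    updated = client.inference.api_keys.update(
        path_api_key_uuid="api_key_uuid",
        body_api_key_uuid="api_key_uuid",
        name="name",
    )
    regenerated = client.inference.api_keys.update_regenerate("api_key_uuid")
    deleted = client.inference.api_keys.delete("api_key_uuid")
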
@pytest.mark.skip() @parametrize - def test_path_params_update_regenerate(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_update_regenerate(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): - client.api_keys.api_keys.with_raw_response.update_regenerate( + client.inference.api_keys.with_raw_response.update_regenerate( "", ) class TestAsyncAPIKeys: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - api_key = await async_client.api_keys.api_keys.create() + async def test_method_create(self, async_client: AsyncGradientAI) -> None: + api_key = await async_client.inference.api_keys.create() assert_matches_type(APIKeyCreateResponse, api_key, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - api_key = await async_client.api_keys.api_keys.create( + async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + api_key = await async_client.inference.api_keys.create( name="name", ) assert_matches_type(APIKeyCreateResponse, api_key, path=["response"]) @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - response = await async_client.api_keys.api_keys.with_raw_response.create() + async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + response = await async_client.inference.api_keys.with_raw_response.create() assert response.is_closed is True assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -262,8 +264,8 @@ async def test_raw_response_create(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - async with async_client.api_keys.api_keys.with_streaming_response.create() as response: + async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async with async_client.inference.api_keys.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -274,16 +276,16 @@ async def test_streaming_response_create(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - api_key = await async_client.api_keys.api_keys.update( + async def test_method_update(self, async_client: AsyncGradientAI) -> None: + api_key = await async_client.inference.api_keys.update( path_api_key_uuid="api_key_uuid", ) assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - api_key = await async_client.api_keys.api_keys.update( + async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: + api_key = await async_client.inference.api_keys.update( path_api_key_uuid="api_key_uuid", body_api_key_uuid="api_key_uuid", 
name="name", @@ -292,8 +294,8 @@ async def test_method_update_with_all_params(self, async_client: AsyncDigitaloce @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - response = await async_client.api_keys.api_keys.with_raw_response.update( + async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + response = await async_client.inference.api_keys.with_raw_response.update( path_api_key_uuid="api_key_uuid", ) @@ -304,8 +306,8 @@ async def test_raw_response_update(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - async with async_client.api_keys.api_keys.with_streaming_response.update( + async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async with async_client.inference.api_keys.with_streaming_response.update( path_api_key_uuid="api_key_uuid", ) as response: assert not response.is_closed @@ -318,22 +320,22 @@ async def test_streaming_response_update(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): - await async_client.api_keys.api_keys.with_raw_response.update( + await async_client.inference.api_keys.with_raw_response.update( path_api_key_uuid="", ) @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - api_key = await async_client.api_keys.api_keys.list() + async def test_method_list(self, async_client: AsyncGradientAI) -> None: + api_key = await async_client.inference.api_keys.list() assert_matches_type(APIKeyListResponse, api_key, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - api_key = await async_client.api_keys.api_keys.list( + async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + api_key = await async_client.inference.api_keys.list( page=0, per_page=0, ) @@ -341,8 +343,8 @@ async def test_method_list_with_all_params(self, async_client: AsyncDigitalocean @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - response = await async_client.api_keys.api_keys.with_raw_response.list() + async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + response = await async_client.inference.api_keys.with_raw_response.list() assert response.is_closed is True assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -351,8 +353,8 @@ async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - async with async_client.api_keys.api_keys.with_streaming_response.list() as response: + async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async with async_client.inference.api_keys.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -363,16 +365,16 @@ 
async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGena @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - api_key = await async_client.api_keys.api_keys.delete( + async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + api_key = await async_client.inference.api_keys.delete( "api_key_uuid", ) assert_matches_type(APIKeyDeleteResponse, api_key, path=["response"]) @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - response = await async_client.api_keys.api_keys.with_raw_response.delete( + async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + response = await async_client.inference.api_keys.with_raw_response.delete( "api_key_uuid", ) @@ -383,8 +385,8 @@ async def test_raw_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - async with async_client.api_keys.api_keys.with_streaming_response.delete( + async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async with async_client.inference.api_keys.with_streaming_response.delete( "api_key_uuid", ) as response: assert not response.is_closed @@ -397,24 +399,24 @@ async def test_streaming_response_delete(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): - await async_client.api_keys.api_keys.with_raw_response.delete( + await async_client.inference.api_keys.with_raw_response.delete( "", ) @pytest.mark.skip() @parametrize - async def test_method_update_regenerate(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - api_key = await async_client.api_keys.api_keys.update_regenerate( + async def test_method_update_regenerate(self, async_client: AsyncGradientAI) -> None: + api_key = await async_client.inference.api_keys.update_regenerate( "api_key_uuid", ) assert_matches_type(APIKeyUpdateRegenerateResponse, api_key, path=["response"]) @pytest.mark.skip() @parametrize - async def test_raw_response_update_regenerate(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - response = await async_client.api_keys.api_keys.with_raw_response.update_regenerate( + async def test_raw_response_update_regenerate(self, async_client: AsyncGradientAI) -> None: + response = await async_client.inference.api_keys.with_raw_response.update_regenerate( "api_key_uuid", ) @@ -425,8 +427,8 @@ async def test_raw_response_update_regenerate(self, async_client: AsyncDigitaloc @pytest.mark.skip() @parametrize - async def test_streaming_response_update_regenerate(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - async with async_client.api_keys.api_keys.with_streaming_response.update_regenerate( + async def test_streaming_response_update_regenerate(self, async_client: AsyncGradientAI) -> None: + async with async_client.inference.api_keys.with_streaming_response.update_regenerate( "api_key_uuid", ) as response: assert not response.is_closed @@ -439,8 +441,8 @@ async def test_streaming_response_update_regenerate(self, async_client: AsyncDig @pytest.mark.skip() @parametrize - async def 
test_path_params_update_regenerate(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_update_regenerate(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): - await async_client.api_keys.api_keys.with_raw_response.update_regenerate( + await async_client.inference.api_keys.with_raw_response.update_regenerate( "", ) diff --git a/tests/api_resources/inference/test_models.py b/tests/api_resources/inference/test_models.py new file mode 100644 index 00000000..569345ed --- /dev/null +++ b/tests/api_resources/inference/test_models.py @@ -0,0 +1,164 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from gradientai import GradientAI, AsyncGradientAI +from tests.utils import assert_matches_type +from gradientai.types.inference import Model, ModelListResponse + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestModels: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip() + @parametrize + def test_method_retrieve(self, client: GradientAI) -> None: + model = client.inference.models.retrieve( + "llama3-8b-instruct", + ) + assert_matches_type(Model, model, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_retrieve(self, client: GradientAI) -> None: + response = client.inference.models.with_raw_response.retrieve( + "llama3-8b-instruct", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + model = response.parse() + assert_matches_type(Model, model, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_retrieve(self, client: GradientAI) -> None: + with client.inference.models.with_streaming_response.retrieve( + "llama3-8b-instruct", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + model = response.parse() + assert_matches_type(Model, model, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_retrieve(self, client: GradientAI) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `model` but received ''"): + client.inference.models.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + def test_method_list(self, client: GradientAI) -> None: + model = client.inference.models.list() + assert_matches_type(ModelListResponse, model, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_list(self, client: GradientAI) -> None: + response = client.inference.models.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + model = response.parse() + assert_matches_type(ModelListResponse, model, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_list(self, client: GradientAI) -> None: + with client.inference.models.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + model = response.parse() + assert_matches_type(ModelListResponse, model, path=["response"]) + + 
assert cast(Any, response.is_closed) is True + + +class TestAsyncModels: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip() + @parametrize + async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + model = await async_client.inference.models.retrieve( + "llama3-8b-instruct", + ) + assert_matches_type(Model, model, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + response = await async_client.inference.models.with_raw_response.retrieve( + "llama3-8b-instruct", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + model = await response.parse() + assert_matches_type(Model, model, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async with async_client.inference.models.with_streaming_response.retrieve( + "llama3-8b-instruct", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + model = await response.parse() + assert_matches_type(Model, model, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `model` but received ''"): + await async_client.inference.models.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_list(self, async_client: AsyncGradientAI) -> None: + model = await async_client.inference.models.list() + assert_matches_type(ModelListResponse, model, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + response = await async_client.inference.models.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + model = await response.parse() + assert_matches_type(ModelListResponse, model, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async with async_client.inference.models.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + model = await response.parse() + assert_matches_type(ModelListResponse, model, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/knowledge_bases/test_data_sources.py b/tests/api_resources/knowledge_bases/test_data_sources.py index 68fd67e5..9c466e2f 100644 --- a/tests/api_resources/knowledge_bases/test_data_sources.py +++ b/tests/api_resources/knowledge_bases/test_data_sources.py @@ -7,9 +7,9 @@ import pytest +from gradientai import GradientAI, AsyncGradientAI from tests.utils import assert_matches_type -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk.types.knowledge_bases import ( +from gradientai.types.knowledge_bases import ( DataSourceListResponse, DataSourceCreateResponse, DataSourceDeleteResponse, @@ -23,7 +23,7 @@ class TestDataSources: 
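
The new tests/api_resources/inference/test_models.py exercises a model catalog under client.inference.models, together with the with_raw_response and with_streaming_response accessors used throughout this diff. A sketch of those call patterns as the tests use them:

    from gradientai import GradientAI

    client = GradientAI()

    # Plain calls return the parsed Model / ModelListResponse objects.
    model = client.inference.models.retrieve("llama3-8b-instruct")
    models = client.inference.models.list()

    # with_raw_response exposes the HTTP layer; parse() yields the same typed object.
    raw = client.inference.models.with_raw_response.list()
    print(raw.http_request.headers.get("X-Stainless-Lang"))
    models = raw.parse()

    # with_streaming_response is a context manager that closes the connection on exit.
    with client.inference.models.with_streaming_response.retrieve("llama3-8b-instruct") as response:
        model = response.parse()
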
@pytest.mark.skip() @parametrize - def test_method_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_create(self, client: GradientAI) -> None: data_source = client.knowledge_bases.data_sources.create( path_knowledge_base_uuid="knowledge_base_uuid", ) @@ -31,7 +31,7 @@ def test_method_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_create_with_all_params(self, client: GradientAI) -> None: data_source = client.knowledge_bases.data_sources.create( path_knowledge_base_uuid="knowledge_base_uuid", aws_data_source={ @@ -57,7 +57,7 @@ def test_method_create_with_all_params(self, client: DigitaloceanGenaiSDK) -> No @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_create(self, client: GradientAI) -> None: response = client.knowledge_bases.data_sources.with_raw_response.create( path_knowledge_base_uuid="knowledge_base_uuid", ) @@ -69,7 +69,7 @@ def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_create(self, client: GradientAI) -> None: with client.knowledge_bases.data_sources.with_streaming_response.create( path_knowledge_base_uuid="knowledge_base_uuid", ) as response: @@ -83,7 +83,7 @@ def test_streaming_response_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_create(self, client: GradientAI) -> None: with pytest.raises( ValueError, match=r"Expected a non-empty value for `path_knowledge_base_uuid` but received ''" ): @@ -93,7 +93,7 @@ def test_path_params_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list(self, client: GradientAI) -> None: data_source = client.knowledge_bases.data_sources.list( knowledge_base_uuid="knowledge_base_uuid", ) @@ -101,7 +101,7 @@ def test_method_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list_with_all_params(self, client: GradientAI) -> None: data_source = client.knowledge_bases.data_sources.list( knowledge_base_uuid="knowledge_base_uuid", page=0, @@ -111,7 +111,7 @@ def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_list(self, client: GradientAI) -> None: response = client.knowledge_bases.data_sources.with_raw_response.list( knowledge_base_uuid="knowledge_base_uuid", ) @@ -123,7 +123,7 @@ def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_list(self, client: GradientAI) -> None: with client.knowledge_bases.data_sources.with_streaming_response.list( knowledge_base_uuid="knowledge_base_uuid", ) as response: @@ -137,7 +137,7 @@ def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def 
test_path_params_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_list(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"): client.knowledge_bases.data_sources.with_raw_response.list( knowledge_base_uuid="", @@ -145,7 +145,7 @@ def test_path_params_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_delete(self, client: GradientAI) -> None: data_source = client.knowledge_bases.data_sources.delete( data_source_uuid="data_source_uuid", knowledge_base_uuid="knowledge_base_uuid", @@ -154,7 +154,7 @@ def test_method_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_delete(self, client: GradientAI) -> None: response = client.knowledge_bases.data_sources.with_raw_response.delete( data_source_uuid="data_source_uuid", knowledge_base_uuid="knowledge_base_uuid", @@ -167,7 +167,7 @@ def test_raw_response_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_delete(self, client: GradientAI) -> None: with client.knowledge_bases.data_sources.with_streaming_response.delete( data_source_uuid="data_source_uuid", knowledge_base_uuid="knowledge_base_uuid", @@ -182,7 +182,7 @@ def test_streaming_response_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_delete(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"): client.knowledge_bases.data_sources.with_raw_response.delete( data_source_uuid="data_source_uuid", @@ -197,11 +197,13 @@ def test_path_params_delete(self, client: DigitaloceanGenaiSDK) -> None: class TestAsyncDataSources: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_create(self, async_client: AsyncGradientAI) -> None: data_source = await async_client.knowledge_bases.data_sources.create( path_knowledge_base_uuid="knowledge_base_uuid", ) @@ -209,7 +211,7 @@ async def test_method_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: data_source = await async_client.knowledge_bases.data_sources.create( path_knowledge_base_uuid="knowledge_base_uuid", aws_data_source={ @@ -235,7 +237,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncDigitaloce @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: response = await 
async_client.knowledge_bases.data_sources.with_raw_response.create( path_knowledge_base_uuid="knowledge_base_uuid", ) @@ -247,7 +249,7 @@ async def test_raw_response_create(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: async with async_client.knowledge_bases.data_sources.with_streaming_response.create( path_knowledge_base_uuid="knowledge_base_uuid", ) as response: @@ -261,7 +263,7 @@ async def test_streaming_response_create(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_create(self, async_client: AsyncGradientAI) -> None: with pytest.raises( ValueError, match=r"Expected a non-empty value for `path_knowledge_base_uuid` but received ''" ): @@ -271,7 +273,7 @@ async def test_path_params_create(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list(self, async_client: AsyncGradientAI) -> None: data_source = await async_client.knowledge_bases.data_sources.list( knowledge_base_uuid="knowledge_base_uuid", ) @@ -279,7 +281,7 @@ async def test_method_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> Non @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: data_source = await async_client.knowledge_bases.data_sources.list( knowledge_base_uuid="knowledge_base_uuid", page=0, @@ -289,7 +291,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncDigitalocean @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: response = await async_client.knowledge_bases.data_sources.with_raw_response.list( knowledge_base_uuid="knowledge_base_uuid", ) @@ -301,7 +303,7 @@ async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: async with async_client.knowledge_bases.data_sources.with_streaming_response.list( knowledge_base_uuid="knowledge_base_uuid", ) as response: @@ -315,7 +317,7 @@ async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGena @pytest.mark.skip() @parametrize - async def test_path_params_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_list(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"): await async_client.knowledge_bases.data_sources.with_raw_response.list( knowledge_base_uuid="", @@ -323,7 +325,7 @@ async def test_path_params_list(self, async_client: AsyncDigitaloceanGenaiSDK) - @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_delete(self, async_client: 
AsyncGradientAI) -> None: data_source = await async_client.knowledge_bases.data_sources.delete( data_source_uuid="data_source_uuid", knowledge_base_uuid="knowledge_base_uuid", @@ -332,7 +334,7 @@ async def test_method_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: response = await async_client.knowledge_bases.data_sources.with_raw_response.delete( data_source_uuid="data_source_uuid", knowledge_base_uuid="knowledge_base_uuid", @@ -345,7 +347,7 @@ async def test_raw_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: async with async_client.knowledge_bases.data_sources.with_streaming_response.delete( data_source_uuid="data_source_uuid", knowledge_base_uuid="knowledge_base_uuid", @@ -360,7 +362,7 @@ async def test_streaming_response_delete(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"): await async_client.knowledge_bases.data_sources.with_raw_response.delete( data_source_uuid="data_source_uuid", diff --git a/tests/api_resources/providers/anthropic/test_keys.py b/tests/api_resources/providers/anthropic/test_keys.py index c5491bd4..86ec19f4 100644 --- a/tests/api_resources/providers/anthropic/test_keys.py +++ b/tests/api_resources/providers/anthropic/test_keys.py @@ -7,9 +7,9 @@ import pytest +from gradientai import GradientAI, AsyncGradientAI from tests.utils import assert_matches_type -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk.types.providers.anthropic import ( +from gradientai.types.providers.anthropic import ( KeyListResponse, KeyCreateResponse, KeyDeleteResponse, @@ -26,13 +26,13 @@ class TestKeys: @pytest.mark.skip() @parametrize - def test_method_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_create(self, client: GradientAI) -> None: key = client.providers.anthropic.keys.create() assert_matches_type(KeyCreateResponse, key, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_create_with_all_params(self, client: GradientAI) -> None: key = client.providers.anthropic.keys.create( api_key="api_key", name="name", @@ -41,7 +41,7 @@ def test_method_create_with_all_params(self, client: DigitaloceanGenaiSDK) -> No @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_create(self, client: GradientAI) -> None: response = client.providers.anthropic.keys.with_raw_response.create() assert response.is_closed is True @@ -51,7 +51,7 @@ def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_create(self, client: GradientAI) -> 
None: with client.providers.anthropic.keys.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -63,7 +63,7 @@ def test_streaming_response_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_retrieve(self, client: GradientAI) -> None: key = client.providers.anthropic.keys.retrieve( "api_key_uuid", ) @@ -71,7 +71,7 @@ def test_method_retrieve(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_retrieve(self, client: GradientAI) -> None: response = client.providers.anthropic.keys.with_raw_response.retrieve( "api_key_uuid", ) @@ -83,7 +83,7 @@ def test_raw_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_retrieve(self, client: GradientAI) -> None: with client.providers.anthropic.keys.with_streaming_response.retrieve( "api_key_uuid", ) as response: @@ -97,7 +97,7 @@ def test_streaming_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_retrieve(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): client.providers.anthropic.keys.with_raw_response.retrieve( "", @@ -105,7 +105,7 @@ def test_path_params_retrieve(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update(self, client: GradientAI) -> None: key = client.providers.anthropic.keys.update( path_api_key_uuid="api_key_uuid", ) @@ -113,7 +113,7 @@ def test_method_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update_with_all_params(self, client: GradientAI) -> None: key = client.providers.anthropic.keys.update( path_api_key_uuid="api_key_uuid", api_key="api_key", @@ -124,7 +124,7 @@ def test_method_update_with_all_params(self, client: DigitaloceanGenaiSDK) -> No @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_update(self, client: GradientAI) -> None: response = client.providers.anthropic.keys.with_raw_response.update( path_api_key_uuid="api_key_uuid", ) @@ -136,7 +136,7 @@ def test_raw_response_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_update(self, client: GradientAI) -> None: with client.providers.anthropic.keys.with_streaming_response.update( path_api_key_uuid="api_key_uuid", ) as response: @@ -150,7 +150,7 @@ def test_streaming_response_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_update(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for 
`path_api_key_uuid` but received ''"): client.providers.anthropic.keys.with_raw_response.update( path_api_key_uuid="", @@ -158,13 +158,13 @@ def test_path_params_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list(self, client: GradientAI) -> None: key = client.providers.anthropic.keys.list() assert_matches_type(KeyListResponse, key, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list_with_all_params(self, client: GradientAI) -> None: key = client.providers.anthropic.keys.list( page=0, per_page=0, @@ -173,7 +173,7 @@ def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_list(self, client: GradientAI) -> None: response = client.providers.anthropic.keys.with_raw_response.list() assert response.is_closed is True @@ -183,7 +183,7 @@ def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_list(self, client: GradientAI) -> None: with client.providers.anthropic.keys.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -195,7 +195,7 @@ def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_delete(self, client: GradientAI) -> None: key = client.providers.anthropic.keys.delete( "api_key_uuid", ) @@ -203,7 +203,7 @@ def test_method_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_delete(self, client: GradientAI) -> None: response = client.providers.anthropic.keys.with_raw_response.delete( "api_key_uuid", ) @@ -215,7 +215,7 @@ def test_raw_response_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_delete(self, client: GradientAI) -> None: with client.providers.anthropic.keys.with_streaming_response.delete( "api_key_uuid", ) as response: @@ -229,7 +229,7 @@ def test_streaming_response_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_delete(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): client.providers.anthropic.keys.with_raw_response.delete( "", @@ -237,7 +237,7 @@ def test_path_params_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_list_agents(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list_agents(self, client: GradientAI) -> None: key = client.providers.anthropic.keys.list_agents( uuid="uuid", ) @@ -245,7 +245,7 @@ def test_method_list_agents(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def 
test_method_list_agents_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list_agents_with_all_params(self, client: GradientAI) -> None: key = client.providers.anthropic.keys.list_agents( uuid="uuid", page=0, @@ -255,7 +255,7 @@ def test_method_list_agents_with_all_params(self, client: DigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - def test_raw_response_list_agents(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_list_agents(self, client: GradientAI) -> None: response = client.providers.anthropic.keys.with_raw_response.list_agents( uuid="uuid", ) @@ -267,7 +267,7 @@ def test_raw_response_list_agents(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list_agents(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_list_agents(self, client: GradientAI) -> None: with client.providers.anthropic.keys.with_streaming_response.list_agents( uuid="uuid", ) as response: @@ -281,7 +281,7 @@ def test_streaming_response_list_agents(self, client: DigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - def test_path_params_list_agents(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_list_agents(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): client.providers.anthropic.keys.with_raw_response.list_agents( uuid="", @@ -289,17 +289,19 @@ def test_path_params_list_agents(self, client: DigitaloceanGenaiSDK) -> None: class TestAsyncKeys: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_create(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.anthropic.keys.create() assert_matches_type(KeyCreateResponse, key, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.anthropic.keys.create( api_key="api_key", name="name", @@ -308,7 +310,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncDigitaloce @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: response = await async_client.providers.anthropic.keys.with_raw_response.create() assert response.is_closed is True @@ -318,7 +320,7 @@ async def test_raw_response_create(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: async with async_client.providers.anthropic.keys.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -330,7 +332,7 @@ async def test_streaming_response_create(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() 
@parametrize - async def test_method_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.anthropic.keys.retrieve( "api_key_uuid", ) @@ -338,7 +340,7 @@ async def test_method_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: response = await async_client.providers.anthropic.keys.with_raw_response.retrieve( "api_key_uuid", ) @@ -350,7 +352,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncDigitaloceanGenaiS @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: async with async_client.providers.anthropic.keys.with_streaming_response.retrieve( "api_key_uuid", ) as response: @@ -364,7 +366,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncDigitalocean @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): await async_client.providers.anthropic.keys.with_raw_response.retrieve( "", @@ -372,7 +374,7 @@ async def test_path_params_retrieve(self, async_client: AsyncDigitaloceanGenaiSD @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_update(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.anthropic.keys.update( path_api_key_uuid="api_key_uuid", ) @@ -380,7 +382,7 @@ async def test_method_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.anthropic.keys.update( path_api_key_uuid="api_key_uuid", api_key="api_key", @@ -391,7 +393,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncDigitaloce @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: response = await async_client.providers.anthropic.keys.with_raw_response.update( path_api_key_uuid="api_key_uuid", ) @@ -403,7 +405,7 @@ async def test_raw_response_update(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: async with async_client.providers.anthropic.keys.with_streaming_response.update( path_api_key_uuid="api_key_uuid", ) as response: @@ -417,7 +419,7 @@ async def test_streaming_response_update(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async 
def test_path_params_update(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): await async_client.providers.anthropic.keys.with_raw_response.update( path_api_key_uuid="", @@ -425,13 +427,13 @@ async def test_path_params_update(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.anthropic.keys.list() assert_matches_type(KeyListResponse, key, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.anthropic.keys.list( page=0, per_page=0, @@ -440,7 +442,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncDigitalocean @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: response = await async_client.providers.anthropic.keys.with_raw_response.list() assert response.is_closed is True @@ -450,7 +452,7 @@ async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: async with async_client.providers.anthropic.keys.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -462,7 +464,7 @@ async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGena @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_delete(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.anthropic.keys.delete( "api_key_uuid", ) @@ -470,7 +472,7 @@ async def test_method_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: response = await async_client.providers.anthropic.keys.with_raw_response.delete( "api_key_uuid", ) @@ -482,7 +484,7 @@ async def test_raw_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: async with async_client.providers.anthropic.keys.with_streaming_response.delete( "api_key_uuid", ) as response: @@ -496,7 +498,7 @@ async def test_streaming_response_delete(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): 
await async_client.providers.anthropic.keys.with_raw_response.delete( "", @@ -504,7 +506,7 @@ async def test_path_params_delete(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_method_list_agents(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list_agents(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.anthropic.keys.list_agents( uuid="uuid", ) @@ -512,7 +514,7 @@ async def test_method_list_agents(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_method_list_agents_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list_agents_with_all_params(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.anthropic.keys.list_agents( uuid="uuid", page=0, @@ -522,7 +524,7 @@ async def test_method_list_agents_with_all_params(self, async_client: AsyncDigit @pytest.mark.skip() @parametrize - async def test_raw_response_list_agents(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_list_agents(self, async_client: AsyncGradientAI) -> None: response = await async_client.providers.anthropic.keys.with_raw_response.list_agents( uuid="uuid", ) @@ -534,7 +536,7 @@ async def test_raw_response_list_agents(self, async_client: AsyncDigitaloceanGen @pytest.mark.skip() @parametrize - async def test_streaming_response_list_agents(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_list_agents(self, async_client: AsyncGradientAI) -> None: async with async_client.providers.anthropic.keys.with_streaming_response.list_agents( uuid="uuid", ) as response: @@ -548,7 +550,7 @@ async def test_streaming_response_list_agents(self, async_client: AsyncDigitaloc @pytest.mark.skip() @parametrize - async def test_path_params_list_agents(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_list_agents(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): await async_client.providers.anthropic.keys.with_raw_response.list_agents( uuid="", diff --git a/tests/api_resources/providers/openai/test_keys.py b/tests/api_resources/providers/openai/test_keys.py index b88b6a5f..ce5cb4f5 100644 --- a/tests/api_resources/providers/openai/test_keys.py +++ b/tests/api_resources/providers/openai/test_keys.py @@ -7,9 +7,9 @@ import pytest +from gradientai import GradientAI, AsyncGradientAI from tests.utils import assert_matches_type -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk.types.providers.openai import ( +from gradientai.types.providers.openai import ( KeyListResponse, KeyCreateResponse, KeyDeleteResponse, @@ -26,13 +26,13 @@ class TestKeys: @pytest.mark.skip() @parametrize - def test_method_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_create(self, client: GradientAI) -> None: key = client.providers.openai.keys.create() assert_matches_type(KeyCreateResponse, key, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_create_with_all_params(self, client: GradientAI) -> None: key = client.providers.openai.keys.create( api_key="api_key", name="name", @@ -41,7 +41,7 @@ def test_method_create_with_all_params(self, client: DigitaloceanGenaiSDK) -> 
No @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_create(self, client: GradientAI) -> None: response = client.providers.openai.keys.with_raw_response.create() assert response.is_closed is True @@ -51,7 +51,7 @@ def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_create(self, client: GradientAI) -> None: with client.providers.openai.keys.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -63,7 +63,7 @@ def test_streaming_response_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_retrieve(self, client: GradientAI) -> None: key = client.providers.openai.keys.retrieve( "api_key_uuid", ) @@ -71,7 +71,7 @@ def test_method_retrieve(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_retrieve(self, client: GradientAI) -> None: response = client.providers.openai.keys.with_raw_response.retrieve( "api_key_uuid", ) @@ -83,7 +83,7 @@ def test_raw_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_retrieve(self, client: GradientAI) -> None: with client.providers.openai.keys.with_streaming_response.retrieve( "api_key_uuid", ) as response: @@ -97,7 +97,7 @@ def test_streaming_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_retrieve(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): client.providers.openai.keys.with_raw_response.retrieve( "", @@ -105,7 +105,7 @@ def test_path_params_retrieve(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update(self, client: GradientAI) -> None: key = client.providers.openai.keys.update( path_api_key_uuid="api_key_uuid", ) @@ -113,7 +113,7 @@ def test_method_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update_with_all_params(self, client: GradientAI) -> None: key = client.providers.openai.keys.update( path_api_key_uuid="api_key_uuid", api_key="api_key", @@ -124,7 +124,7 @@ def test_method_update_with_all_params(self, client: DigitaloceanGenaiSDK) -> No @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_update(self, client: GradientAI) -> None: response = client.providers.openai.keys.with_raw_response.update( path_api_key_uuid="api_key_uuid", ) @@ -136,7 +136,7 @@ def test_raw_response_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: DigitaloceanGenaiSDK) -> None: + 
def test_streaming_response_update(self, client: GradientAI) -> None: with client.providers.openai.keys.with_streaming_response.update( path_api_key_uuid="api_key_uuid", ) as response: @@ -150,7 +150,7 @@ def test_streaming_response_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_update(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): client.providers.openai.keys.with_raw_response.update( path_api_key_uuid="", @@ -158,13 +158,13 @@ def test_path_params_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list(self, client: GradientAI) -> None: key = client.providers.openai.keys.list() assert_matches_type(KeyListResponse, key, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list_with_all_params(self, client: GradientAI) -> None: key = client.providers.openai.keys.list( page=0, per_page=0, @@ -173,7 +173,7 @@ def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_list(self, client: GradientAI) -> None: response = client.providers.openai.keys.with_raw_response.list() assert response.is_closed is True @@ -183,7 +183,7 @@ def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_list(self, client: GradientAI) -> None: with client.providers.openai.keys.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -195,7 +195,7 @@ def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_delete(self, client: GradientAI) -> None: key = client.providers.openai.keys.delete( "api_key_uuid", ) @@ -203,7 +203,7 @@ def test_method_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_delete(self, client: GradientAI) -> None: response = client.providers.openai.keys.with_raw_response.delete( "api_key_uuid", ) @@ -215,7 +215,7 @@ def test_raw_response_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_delete(self, client: GradientAI) -> None: with client.providers.openai.keys.with_streaming_response.delete( "api_key_uuid", ) as response: @@ -229,7 +229,7 @@ def test_streaming_response_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_delete(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): 
client.providers.openai.keys.with_raw_response.delete( "", @@ -237,7 +237,7 @@ def test_path_params_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve_agents(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_retrieve_agents(self, client: GradientAI) -> None: key = client.providers.openai.keys.retrieve_agents( uuid="uuid", ) @@ -245,7 +245,7 @@ def test_method_retrieve_agents(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve_agents_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_retrieve_agents_with_all_params(self, client: GradientAI) -> None: key = client.providers.openai.keys.retrieve_agents( uuid="uuid", page=0, @@ -255,7 +255,7 @@ def test_method_retrieve_agents_with_all_params(self, client: DigitaloceanGenaiS @pytest.mark.skip() @parametrize - def test_raw_response_retrieve_agents(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_retrieve_agents(self, client: GradientAI) -> None: response = client.providers.openai.keys.with_raw_response.retrieve_agents( uuid="uuid", ) @@ -267,7 +267,7 @@ def test_raw_response_retrieve_agents(self, client: DigitaloceanGenaiSDK) -> Non @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve_agents(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_retrieve_agents(self, client: GradientAI) -> None: with client.providers.openai.keys.with_streaming_response.retrieve_agents( uuid="uuid", ) as response: @@ -281,7 +281,7 @@ def test_streaming_response_retrieve_agents(self, client: DigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - def test_path_params_retrieve_agents(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_retrieve_agents(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): client.providers.openai.keys.with_raw_response.retrieve_agents( uuid="", @@ -289,17 +289,19 @@ def test_path_params_retrieve_agents(self, client: DigitaloceanGenaiSDK) -> None class TestAsyncKeys: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_create(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.openai.keys.create() assert_matches_type(KeyCreateResponse, key, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.openai.keys.create( api_key="api_key", name="name", @@ -308,7 +310,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncDigitaloce @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: response = await async_client.providers.openai.keys.with_raw_response.create() assert response.is_closed is True @@ -318,7 +320,7 @@ async def test_raw_response_create(self, 
async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: async with async_client.providers.openai.keys.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -330,7 +332,7 @@ async def test_streaming_response_create(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.openai.keys.retrieve( "api_key_uuid", ) @@ -338,7 +340,7 @@ async def test_method_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: response = await async_client.providers.openai.keys.with_raw_response.retrieve( "api_key_uuid", ) @@ -350,7 +352,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncDigitaloceanGenaiS @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: async with async_client.providers.openai.keys.with_streaming_response.retrieve( "api_key_uuid", ) as response: @@ -364,7 +366,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncDigitalocean @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): await async_client.providers.openai.keys.with_raw_response.retrieve( "", @@ -372,7 +374,7 @@ async def test_path_params_retrieve(self, async_client: AsyncDigitaloceanGenaiSD @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_update(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.openai.keys.update( path_api_key_uuid="api_key_uuid", ) @@ -380,7 +382,7 @@ async def test_method_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.openai.keys.update( path_api_key_uuid="api_key_uuid", api_key="api_key", @@ -391,7 +393,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncDigitaloce @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: response = await async_client.providers.openai.keys.with_raw_response.update( path_api_key_uuid="api_key_uuid", ) @@ -403,7 +405,7 @@ async def test_raw_response_update(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() 
@parametrize - async def test_streaming_response_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: async with async_client.providers.openai.keys.with_streaming_response.update( path_api_key_uuid="api_key_uuid", ) as response: @@ -417,7 +419,7 @@ async def test_streaming_response_update(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): await async_client.providers.openai.keys.with_raw_response.update( path_api_key_uuid="", @@ -425,13 +427,13 @@ async def test_path_params_update(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.openai.keys.list() assert_matches_type(KeyListResponse, key, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.openai.keys.list( page=0, per_page=0, @@ -440,7 +442,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncDigitalocean @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: response = await async_client.providers.openai.keys.with_raw_response.list() assert response.is_closed is True @@ -450,7 +452,7 @@ async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: async with async_client.providers.openai.keys.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -462,7 +464,7 @@ async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGena @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_delete(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.openai.keys.delete( "api_key_uuid", ) @@ -470,7 +472,7 @@ async def test_method_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: response = await async_client.providers.openai.keys.with_raw_response.delete( "api_key_uuid", ) @@ -482,7 +484,7 @@ async def test_raw_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_delete(self, async_client: 
AsyncGradientAI) -> None: async with async_client.providers.openai.keys.with_streaming_response.delete( "api_key_uuid", ) as response: @@ -496,7 +498,7 @@ async def test_streaming_response_delete(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): await async_client.providers.openai.keys.with_raw_response.delete( "", @@ -504,7 +506,7 @@ async def test_path_params_delete(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_method_retrieve_agents(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_retrieve_agents(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.openai.keys.retrieve_agents( uuid="uuid", ) @@ -512,7 +514,7 @@ async def test_method_retrieve_agents(self, async_client: AsyncDigitaloceanGenai @pytest.mark.skip() @parametrize - async def test_method_retrieve_agents_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_retrieve_agents_with_all_params(self, async_client: AsyncGradientAI) -> None: key = await async_client.providers.openai.keys.retrieve_agents( uuid="uuid", page=0, @@ -522,7 +524,7 @@ async def test_method_retrieve_agents_with_all_params(self, async_client: AsyncD @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve_agents(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_retrieve_agents(self, async_client: AsyncGradientAI) -> None: response = await async_client.providers.openai.keys.with_raw_response.retrieve_agents( uuid="uuid", ) @@ -534,7 +536,7 @@ async def test_raw_response_retrieve_agents(self, async_client: AsyncDigitalocea @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve_agents(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_retrieve_agents(self, async_client: AsyncGradientAI) -> None: async with async_client.providers.openai.keys.with_streaming_response.retrieve_agents( uuid="uuid", ) as response: @@ -548,7 +550,7 @@ async def test_streaming_response_retrieve_agents(self, async_client: AsyncDigit @pytest.mark.skip() @parametrize - async def test_path_params_retrieve_agents(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_retrieve_agents(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): await async_client.providers.openai.keys.with_raw_response.retrieve_agents( uuid="", diff --git a/tests/api_resources/auth/agents/__init__.py b/tests/api_resources/regions/__init__.py similarity index 100% rename from tests/api_resources/auth/agents/__init__.py rename to tests/api_resources/regions/__init__.py diff --git a/src/digitalocean_genai_sdk/types/providers/__init__.py b/tests/api_resources/regions/evaluation_runs/__init__.py similarity index 70% rename from src/digitalocean_genai_sdk/types/providers/__init__.py rename to tests/api_resources/regions/evaluation_runs/__init__.py index f8ee8b14..fd8019a9 100644 --- a/src/digitalocean_genai_sdk/types/providers/__init__.py +++ b/tests/api_resources/regions/evaluation_runs/__init__.py @@ -1,3 +1 @@ # File generated from our OpenAPI spec 
by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations diff --git a/tests/api_resources/regions/evaluation_runs/test_results.py b/tests/api_resources/regions/evaluation_runs/test_results.py new file mode 100644 index 00000000..29deb8b2 --- /dev/null +++ b/tests/api_resources/regions/evaluation_runs/test_results.py @@ -0,0 +1,200 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from gradientai import GradientAI, AsyncGradientAI +from tests.utils import assert_matches_type +from gradientai.types.regions.evaluation_runs import ResultRetrieveResponse, ResultRetrievePromptResponse + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestResults: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip() + @parametrize + def test_method_retrieve(self, client: GradientAI) -> None: + result = client.regions.evaluation_runs.results.retrieve( + "evaluation_run_uuid", + ) + assert_matches_type(ResultRetrieveResponse, result, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_retrieve(self, client: GradientAI) -> None: + response = client.regions.evaluation_runs.results.with_raw_response.retrieve( + "evaluation_run_uuid", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + result = response.parse() + assert_matches_type(ResultRetrieveResponse, result, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_retrieve(self, client: GradientAI) -> None: + with client.regions.evaluation_runs.results.with_streaming_response.retrieve( + "evaluation_run_uuid", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + result = response.parse() + assert_matches_type(ResultRetrieveResponse, result, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_retrieve(self, client: GradientAI) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"): + client.regions.evaluation_runs.results.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + def test_method_retrieve_prompt(self, client: GradientAI) -> None: + result = client.regions.evaluation_runs.results.retrieve_prompt( + prompt_id=0, + evaluation_run_uuid="evaluation_run_uuid", + ) + assert_matches_type(ResultRetrievePromptResponse, result, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_retrieve_prompt(self, client: GradientAI) -> None: + response = client.regions.evaluation_runs.results.with_raw_response.retrieve_prompt( + prompt_id=0, + evaluation_run_uuid="evaluation_run_uuid", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + result = response.parse() + assert_matches_type(ResultRetrievePromptResponse, result, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_retrieve_prompt(self, client: GradientAI) -> None: + with client.regions.evaluation_runs.results.with_streaming_response.retrieve_prompt( + prompt_id=0, + evaluation_run_uuid="evaluation_run_uuid", + ) as response: + assert not 
response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + result = response.parse() + assert_matches_type(ResultRetrievePromptResponse, result, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_retrieve_prompt(self, client: GradientAI) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"): + client.regions.evaluation_runs.results.with_raw_response.retrieve_prompt( + prompt_id=0, + evaluation_run_uuid="", + ) + + +class TestAsyncResults: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip() + @parametrize + async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + result = await async_client.regions.evaluation_runs.results.retrieve( + "evaluation_run_uuid", + ) + assert_matches_type(ResultRetrieveResponse, result, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + response = await async_client.regions.evaluation_runs.results.with_raw_response.retrieve( + "evaluation_run_uuid", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + result = await response.parse() + assert_matches_type(ResultRetrieveResponse, result, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async with async_client.regions.evaluation_runs.results.with_streaming_response.retrieve( + "evaluation_run_uuid", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + result = await response.parse() + assert_matches_type(ResultRetrieveResponse, result, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"): + await async_client.regions.evaluation_runs.results.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_retrieve_prompt(self, async_client: AsyncGradientAI) -> None: + result = await async_client.regions.evaluation_runs.results.retrieve_prompt( + prompt_id=0, + evaluation_run_uuid="evaluation_run_uuid", + ) + assert_matches_type(ResultRetrievePromptResponse, result, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_retrieve_prompt(self, async_client: AsyncGradientAI) -> None: + response = await async_client.regions.evaluation_runs.results.with_raw_response.retrieve_prompt( + prompt_id=0, + evaluation_run_uuid="evaluation_run_uuid", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + result = await response.parse() + assert_matches_type(ResultRetrievePromptResponse, result, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_retrieve_prompt(self, async_client: AsyncGradientAI) -> None: + async with async_client.regions.evaluation_runs.results.with_streaming_response.retrieve_prompt( + prompt_id=0, + 
evaluation_run_uuid="evaluation_run_uuid", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + result = await response.parse() + assert_matches_type(ResultRetrievePromptResponse, result, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_retrieve_prompt(self, async_client: AsyncGradientAI) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"): + await async_client.regions.evaluation_runs.results.with_raw_response.retrieve_prompt( + prompt_id=0, + evaluation_run_uuid="", + ) diff --git a/tests/api_resources/regions/test_evaluation_datasets.py b/tests/api_resources/regions/test_evaluation_datasets.py new file mode 100644 index 00000000..3e3da0fe --- /dev/null +++ b/tests/api_resources/regions/test_evaluation_datasets.py @@ -0,0 +1,211 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from gradientai import GradientAI, AsyncGradientAI +from tests.utils import assert_matches_type +from gradientai.types.regions import ( + EvaluationDatasetCreateResponse, + EvaluationDatasetCreateFileUploadPresignedURLsResponse, +) + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestEvaluationDatasets: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip() + @parametrize + def test_method_create(self, client: GradientAI) -> None: + evaluation_dataset = client.regions.evaluation_datasets.create() + assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_method_create_with_all_params(self, client: GradientAI) -> None: + evaluation_dataset = client.regions.evaluation_datasets.create( + file_upload_dataset={ + "original_file_name": "original_file_name", + "size_in_bytes": "size_in_bytes", + "stored_object_key": "stored_object_key", + }, + name="name", + ) + assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_create(self, client: GradientAI) -> None: + response = client.regions.evaluation_datasets.with_raw_response.create() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluation_dataset = response.parse() + assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_create(self, client: GradientAI) -> None: + with client.regions.evaluation_datasets.with_streaming_response.create() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluation_dataset = response.parse() + assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_method_create_file_upload_presigned_urls(self, client: GradientAI) -> None: + evaluation_dataset = client.regions.evaluation_datasets.create_file_upload_presigned_urls() + assert_matches_type( + EvaluationDatasetCreateFileUploadPresignedURLsResponse, 
evaluation_dataset, path=["response"] + ) + + @pytest.mark.skip() + @parametrize + def test_method_create_file_upload_presigned_urls_with_all_params(self, client: GradientAI) -> None: + evaluation_dataset = client.regions.evaluation_datasets.create_file_upload_presigned_urls( + files=[ + { + "file_name": "file_name", + "file_size": "file_size", + } + ], + ) + assert_matches_type( + EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"] + ) + + @pytest.mark.skip() + @parametrize + def test_raw_response_create_file_upload_presigned_urls(self, client: GradientAI) -> None: + response = client.regions.evaluation_datasets.with_raw_response.create_file_upload_presigned_urls() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluation_dataset = response.parse() + assert_matches_type( + EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"] + ) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_create_file_upload_presigned_urls(self, client: GradientAI) -> None: + with client.regions.evaluation_datasets.with_streaming_response.create_file_upload_presigned_urls() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluation_dataset = response.parse() + assert_matches_type( + EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"] + ) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncEvaluationDatasets: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip() + @parametrize + async def test_method_create(self, async_client: AsyncGradientAI) -> None: + evaluation_dataset = await async_client.regions.evaluation_datasets.create() + assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + evaluation_dataset = await async_client.regions.evaluation_datasets.create( + file_upload_dataset={ + "original_file_name": "original_file_name", + "size_in_bytes": "size_in_bytes", + "stored_object_key": "stored_object_key", + }, + name="name", + ) + assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + response = await async_client.regions.evaluation_datasets.with_raw_response.create() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluation_dataset = await response.parse() + assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async with async_client.regions.evaluation_datasets.with_streaming_response.create() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluation_dataset = await response.parse() + assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + 
@parametrize + async def test_method_create_file_upload_presigned_urls(self, async_client: AsyncGradientAI) -> None: + evaluation_dataset = await async_client.regions.evaluation_datasets.create_file_upload_presigned_urls() + assert_matches_type( + EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"] + ) + + @pytest.mark.skip() + @parametrize + async def test_method_create_file_upload_presigned_urls_with_all_params( + self, async_client: AsyncGradientAI + ) -> None: + evaluation_dataset = await async_client.regions.evaluation_datasets.create_file_upload_presigned_urls( + files=[ + { + "file_name": "file_name", + "file_size": "file_size", + } + ], + ) + assert_matches_type( + EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"] + ) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_create_file_upload_presigned_urls(self, async_client: AsyncGradientAI) -> None: + response = await async_client.regions.evaluation_datasets.with_raw_response.create_file_upload_presigned_urls() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluation_dataset = await response.parse() + assert_matches_type( + EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"] + ) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_create_file_upload_presigned_urls(self, async_client: AsyncGradientAI) -> None: + async with ( + async_client.regions.evaluation_datasets.with_streaming_response.create_file_upload_presigned_urls() + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluation_dataset = await response.parse() + assert_matches_type( + EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"] + ) + + assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/regions/test_evaluation_runs.py b/tests/api_resources/regions/test_evaluation_runs.py new file mode 100644 index 00000000..b2d3c634 --- /dev/null +++ b/tests/api_resources/regions/test_evaluation_runs.py @@ -0,0 +1,187 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from gradientai import GradientAI, AsyncGradientAI +from tests.utils import assert_matches_type +from gradientai.types.regions import ( + EvaluationRunCreateResponse, + EvaluationRunRetrieveResponse, +) + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestEvaluationRuns: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip() + @parametrize + def test_method_create(self, client: GradientAI) -> None: + evaluation_run = client.regions.evaluation_runs.create() + assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_method_create_with_all_params(self, client: GradientAI) -> None: + evaluation_run = client.regions.evaluation_runs.create( + agent_uuid="agent_uuid", + run_name="run_name", + test_case_uuid="test_case_uuid", + ) + assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_create(self, client: GradientAI) -> None: + response = client.regions.evaluation_runs.with_raw_response.create() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluation_run = response.parse() + assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_create(self, client: GradientAI) -> None: + with client.regions.evaluation_runs.with_streaming_response.create() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluation_run = response.parse() + assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_method_retrieve(self, client: GradientAI) -> None: + evaluation_run = client.regions.evaluation_runs.retrieve( + "evaluation_run_uuid", + ) + assert_matches_type(EvaluationRunRetrieveResponse, evaluation_run, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_retrieve(self, client: GradientAI) -> None: + response = client.regions.evaluation_runs.with_raw_response.retrieve( + "evaluation_run_uuid", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluation_run = response.parse() + assert_matches_type(EvaluationRunRetrieveResponse, evaluation_run, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_retrieve(self, client: GradientAI) -> None: + with client.regions.evaluation_runs.with_streaming_response.retrieve( + "evaluation_run_uuid", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluation_run = response.parse() + assert_matches_type(EvaluationRunRetrieveResponse, evaluation_run, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_retrieve(self, client: GradientAI) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"): + client.regions.evaluation_runs.with_raw_response.retrieve( + "", + ) + + +class TestAsyncEvaluationRuns: + 
parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip() + @parametrize + async def test_method_create(self, async_client: AsyncGradientAI) -> None: + evaluation_run = await async_client.regions.evaluation_runs.create() + assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + evaluation_run = await async_client.regions.evaluation_runs.create( + agent_uuid="agent_uuid", + run_name="run_name", + test_case_uuid="test_case_uuid", + ) + assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + response = await async_client.regions.evaluation_runs.with_raw_response.create() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluation_run = await response.parse() + assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async with async_client.regions.evaluation_runs.with_streaming_response.create() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluation_run = await response.parse() + assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + evaluation_run = await async_client.regions.evaluation_runs.retrieve( + "evaluation_run_uuid", + ) + assert_matches_type(EvaluationRunRetrieveResponse, evaluation_run, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + response = await async_client.regions.evaluation_runs.with_raw_response.retrieve( + "evaluation_run_uuid", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluation_run = await response.parse() + assert_matches_type(EvaluationRunRetrieveResponse, evaluation_run, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async with async_client.regions.evaluation_runs.with_streaming_response.retrieve( + "evaluation_run_uuid", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluation_run = await response.parse() + assert_matches_type(EvaluationRunRetrieveResponse, evaluation_run, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"): + await async_client.regions.evaluation_runs.with_raw_response.retrieve( + "", + ) diff --git a/tests/api_resources/regions/test_evaluation_test_cases.py b/tests/api_resources/regions/test_evaluation_test_cases.py new file mode 
100644 index 00000000..a01ace90 --- /dev/null +++ b/tests/api_resources/regions/test_evaluation_test_cases.py @@ -0,0 +1,486 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from gradientai import GradientAI, AsyncGradientAI +from tests.utils import assert_matches_type +from gradientai.types.regions import ( + EvaluationTestCaseListResponse, + EvaluationTestCaseCreateResponse, + EvaluationTestCaseUpdateResponse, + EvaluationTestCaseRetrieveResponse, + EvaluationTestCaseListEvaluationRunsResponse, +) + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestEvaluationTestCases: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip() + @parametrize + def test_method_create(self, client: GradientAI) -> None: + evaluation_test_case = client.regions.evaluation_test_cases.create() + assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_method_create_with_all_params(self, client: GradientAI) -> None: + evaluation_test_case = client.regions.evaluation_test_cases.create( + dataset_uuid="dataset_uuid", + description="description", + metrics=["string"], + name="name", + star_metric={ + "metric_uuid": "metric_uuid", + "name": "name", + "success_threshold_pct": 0, + }, + workspace_uuid="workspace_uuid", + ) + assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_create(self, client: GradientAI) -> None: + response = client.regions.evaluation_test_cases.with_raw_response.create() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluation_test_case = response.parse() + assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_create(self, client: GradientAI) -> None: + with client.regions.evaluation_test_cases.with_streaming_response.create() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluation_test_case = response.parse() + assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_method_retrieve(self, client: GradientAI) -> None: + evaluation_test_case = client.regions.evaluation_test_cases.retrieve( + "test_case_uuid", + ) + assert_matches_type(EvaluationTestCaseRetrieveResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_retrieve(self, client: GradientAI) -> None: + response = client.regions.evaluation_test_cases.with_raw_response.retrieve( + "test_case_uuid", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluation_test_case = response.parse() + assert_matches_type(EvaluationTestCaseRetrieveResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_retrieve(self, client: GradientAI) -> None: + with client.regions.evaluation_test_cases.with_streaming_response.retrieve( + "test_case_uuid", + ) 
as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluation_test_case = response.parse() + assert_matches_type(EvaluationTestCaseRetrieveResponse, evaluation_test_case, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_retrieve(self, client: GradientAI) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `test_case_uuid` but received ''"): + client.regions.evaluation_test_cases.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + def test_method_update(self, client: GradientAI) -> None: + evaluation_test_case = client.regions.evaluation_test_cases.update( + path_test_case_uuid="test_case_uuid", + ) + assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_method_update_with_all_params(self, client: GradientAI) -> None: + evaluation_test_case = client.regions.evaluation_test_cases.update( + path_test_case_uuid="test_case_uuid", + dataset_uuid="dataset_uuid", + description="description", + metrics={"metric_uuids": ["string"]}, + name="name", + star_metric={ + "metric_uuid": "metric_uuid", + "name": "name", + "success_threshold_pct": 0, + }, + body_test_case_uuid="test_case_uuid", + ) + assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_update(self, client: GradientAI) -> None: + response = client.regions.evaluation_test_cases.with_raw_response.update( + path_test_case_uuid="test_case_uuid", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluation_test_case = response.parse() + assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_update(self, client: GradientAI) -> None: + with client.regions.evaluation_test_cases.with_streaming_response.update( + path_test_case_uuid="test_case_uuid", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluation_test_case = response.parse() + assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_update(self, client: GradientAI) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_test_case_uuid` but received ''"): + client.regions.evaluation_test_cases.with_raw_response.update( + path_test_case_uuid="", + ) + + @pytest.mark.skip() + @parametrize + def test_method_list(self, client: GradientAI) -> None: + evaluation_test_case = client.regions.evaluation_test_cases.list() + assert_matches_type(EvaluationTestCaseListResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_list(self, client: GradientAI) -> None: + response = client.regions.evaluation_test_cases.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluation_test_case = response.parse() + assert_matches_type(EvaluationTestCaseListResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + 
def test_streaming_response_list(self, client: GradientAI) -> None: + with client.regions.evaluation_test_cases.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluation_test_case = response.parse() + assert_matches_type(EvaluationTestCaseListResponse, evaluation_test_case, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_method_list_evaluation_runs(self, client: GradientAI) -> None: + evaluation_test_case = client.regions.evaluation_test_cases.list_evaluation_runs( + evaluation_test_case_uuid="evaluation_test_case_uuid", + ) + assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_method_list_evaluation_runs_with_all_params(self, client: GradientAI) -> None: + evaluation_test_case = client.regions.evaluation_test_cases.list_evaluation_runs( + evaluation_test_case_uuid="evaluation_test_case_uuid", + evaluation_test_case_version=0, + ) + assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_list_evaluation_runs(self, client: GradientAI) -> None: + response = client.regions.evaluation_test_cases.with_raw_response.list_evaluation_runs( + evaluation_test_case_uuid="evaluation_test_case_uuid", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluation_test_case = response.parse() + assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_list_evaluation_runs(self, client: GradientAI) -> None: + with client.regions.evaluation_test_cases.with_streaming_response.list_evaluation_runs( + evaluation_test_case_uuid="evaluation_test_case_uuid", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluation_test_case = response.parse() + assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_list_evaluation_runs(self, client: GradientAI) -> None: + with pytest.raises( + ValueError, match=r"Expected a non-empty value for `evaluation_test_case_uuid` but received ''" + ): + client.regions.evaluation_test_cases.with_raw_response.list_evaluation_runs( + evaluation_test_case_uuid="", + ) + + +class TestAsyncEvaluationTestCases: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip() + @parametrize + async def test_method_create(self, async_client: AsyncGradientAI) -> None: + evaluation_test_case = await async_client.regions.evaluation_test_cases.create() + assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + evaluation_test_case = await async_client.regions.evaluation_test_cases.create( + dataset_uuid="dataset_uuid", + description="description", + metrics=["string"], + name="name", + star_metric={ + 
"metric_uuid": "metric_uuid", + "name": "name", + "success_threshold_pct": 0, + }, + workspace_uuid="workspace_uuid", + ) + assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + response = await async_client.regions.evaluation_test_cases.with_raw_response.create() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluation_test_case = await response.parse() + assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async with async_client.regions.evaluation_test_cases.with_streaming_response.create() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluation_test_case = await response.parse() + assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + evaluation_test_case = await async_client.regions.evaluation_test_cases.retrieve( + "test_case_uuid", + ) + assert_matches_type(EvaluationTestCaseRetrieveResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + response = await async_client.regions.evaluation_test_cases.with_raw_response.retrieve( + "test_case_uuid", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluation_test_case = await response.parse() + assert_matches_type(EvaluationTestCaseRetrieveResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async with async_client.regions.evaluation_test_cases.with_streaming_response.retrieve( + "test_case_uuid", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluation_test_case = await response.parse() + assert_matches_type(EvaluationTestCaseRetrieveResponse, evaluation_test_case, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `test_case_uuid` but received ''"): + await async_client.regions.evaluation_test_cases.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_update(self, async_client: AsyncGradientAI) -> None: + evaluation_test_case = await async_client.regions.evaluation_test_cases.update( + path_test_case_uuid="test_case_uuid", + ) + assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: + evaluation_test_case = await async_client.regions.evaluation_test_cases.update( + path_test_case_uuid="test_case_uuid", + dataset_uuid="dataset_uuid", + 
description="description", + metrics={"metric_uuids": ["string"]}, + name="name", + star_metric={ + "metric_uuid": "metric_uuid", + "name": "name", + "success_threshold_pct": 0, + }, + body_test_case_uuid="test_case_uuid", + ) + assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + response = await async_client.regions.evaluation_test_cases.with_raw_response.update( + path_test_case_uuid="test_case_uuid", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluation_test_case = await response.parse() + assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async with async_client.regions.evaluation_test_cases.with_streaming_response.update( + path_test_case_uuid="test_case_uuid", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluation_test_case = await response.parse() + assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_test_case_uuid` but received ''"): + await async_client.regions.evaluation_test_cases.with_raw_response.update( + path_test_case_uuid="", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_list(self, async_client: AsyncGradientAI) -> None: + evaluation_test_case = await async_client.regions.evaluation_test_cases.list() + assert_matches_type(EvaluationTestCaseListResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + response = await async_client.regions.evaluation_test_cases.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluation_test_case = await response.parse() + assert_matches_type(EvaluationTestCaseListResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async with async_client.regions.evaluation_test_cases.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluation_test_case = await response.parse() + assert_matches_type(EvaluationTestCaseListResponse, evaluation_test_case, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_method_list_evaluation_runs(self, async_client: AsyncGradientAI) -> None: + evaluation_test_case = await async_client.regions.evaluation_test_cases.list_evaluation_runs( + evaluation_test_case_uuid="evaluation_test_case_uuid", + ) + assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_method_list_evaluation_runs_with_all_params(self, async_client: 
AsyncGradientAI) -> None: + evaluation_test_case = await async_client.regions.evaluation_test_cases.list_evaluation_runs( + evaluation_test_case_uuid="evaluation_test_case_uuid", + evaluation_test_case_version=0, + ) + assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_list_evaluation_runs(self, async_client: AsyncGradientAI) -> None: + response = await async_client.regions.evaluation_test_cases.with_raw_response.list_evaluation_runs( + evaluation_test_case_uuid="evaluation_test_case_uuid", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluation_test_case = await response.parse() + assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_list_evaluation_runs(self, async_client: AsyncGradientAI) -> None: + async with async_client.regions.evaluation_test_cases.with_streaming_response.list_evaluation_runs( + evaluation_test_case_uuid="evaluation_test_case_uuid", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluation_test_case = await response.parse() + assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_list_evaluation_runs(self, async_client: AsyncGradientAI) -> None: + with pytest.raises( + ValueError, match=r"Expected a non-empty value for `evaluation_test_case_uuid` but received ''" + ): + await async_client.regions.evaluation_test_cases.with_raw_response.list_evaluation_runs( + evaluation_test_case_uuid="", + ) diff --git a/tests/api_resources/test_agents.py b/tests/api_resources/test_agents.py index 3aafae23..2cc0e080 100644 --- a/tests/api_resources/test_agents.py +++ b/tests/api_resources/test_agents.py @@ -7,9 +7,9 @@ import pytest +from gradientai import GradientAI, AsyncGradientAI from tests.utils import assert_matches_type -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk.types import ( +from gradientai.types import ( AgentListResponse, AgentCreateResponse, AgentDeleteResponse, @@ -26,13 +26,13 @@ class TestAgents: @pytest.mark.skip() @parametrize - def test_method_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_create(self, client: GradientAI) -> None: agent = client.agents.create() assert_matches_type(AgentCreateResponse, agent, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_create_with_all_params(self, client: GradientAI) -> None: agent = client.agents.create( anthropic_key_uuid="anthropic_key_uuid", description="description", @@ -49,7 +49,7 @@ def test_method_create_with_all_params(self, client: DigitaloceanGenaiSDK) -> No @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_create(self, client: GradientAI) -> None: response = client.agents.with_raw_response.create() assert response.is_closed is True @@ -59,7 +59,7 @@ def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def 
test_streaming_response_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_create(self, client: GradientAI) -> None: with client.agents.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -71,7 +71,7 @@ def test_streaming_response_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_retrieve(self, client: GradientAI) -> None: agent = client.agents.retrieve( "uuid", ) @@ -79,7 +79,7 @@ def test_method_retrieve(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_retrieve(self, client: GradientAI) -> None: response = client.agents.with_raw_response.retrieve( "uuid", ) @@ -91,7 +91,7 @@ def test_raw_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_retrieve(self, client: GradientAI) -> None: with client.agents.with_streaming_response.retrieve( "uuid", ) as response: @@ -105,7 +105,7 @@ def test_streaming_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_retrieve(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): client.agents.with_raw_response.retrieve( "", @@ -113,7 +113,7 @@ def test_path_params_retrieve(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update(self, client: GradientAI) -> None: agent = client.agents.update( path_uuid="uuid", ) @@ -121,7 +121,7 @@ def test_method_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update_with_all_params(self, client: GradientAI) -> None: agent = client.agents.update( path_uuid="uuid", anthropic_key_uuid="anthropic_key_uuid", @@ -144,7 +144,7 @@ def test_method_update_with_all_params(self, client: DigitaloceanGenaiSDK) -> No @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_update(self, client: GradientAI) -> None: response = client.agents.with_raw_response.update( path_uuid="uuid", ) @@ -156,7 +156,7 @@ def test_raw_response_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_update(self, client: GradientAI) -> None: with client.agents.with_streaming_response.update( path_uuid="uuid", ) as response: @@ -170,7 +170,7 @@ def test_streaming_response_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_update(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): client.agents.with_raw_response.update( path_uuid="", 
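
The test_path_params_* cases in this hunk pin down client-side validation: an empty identifier is rejected with a ValueError before any request is sent. A minimal sketch of how that surfaces to a caller (client construction and credentials are assumed here and are not part of this diff):

from gradientai import GradientAI

client = GradientAI()  # credentials assumed to come from the environment, as in the test fixtures

try:
    # The SDK rejects empty path parameters locally; no HTTP request is made.
    client.agents.with_raw_response.update(path_uuid="")
except ValueError as exc:
    print(exc)  # Expected a non-empty value for `path_uuid` but received ''
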
@@ -178,13 +178,13 @@ def test_path_params_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list(self, client: GradientAI) -> None: agent = client.agents.list() assert_matches_type(AgentListResponse, agent, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list_with_all_params(self, client: GradientAI) -> None: agent = client.agents.list( only_deployed=True, page=0, @@ -194,7 +194,7 @@ def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_list(self, client: GradientAI) -> None: response = client.agents.with_raw_response.list() assert response.is_closed is True @@ -204,7 +204,7 @@ def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_list(self, client: GradientAI) -> None: with client.agents.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -216,7 +216,7 @@ def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_delete(self, client: GradientAI) -> None: agent = client.agents.delete( "uuid", ) @@ -224,7 +224,7 @@ def test_method_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_delete(self, client: GradientAI) -> None: response = client.agents.with_raw_response.delete( "uuid", ) @@ -236,7 +236,7 @@ def test_raw_response_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_delete(self, client: GradientAI) -> None: with client.agents.with_streaming_response.delete( "uuid", ) as response: @@ -250,7 +250,7 @@ def test_streaming_response_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_delete(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): client.agents.with_raw_response.delete( "", @@ -258,7 +258,7 @@ def test_path_params_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_update_status(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update_status(self, client: GradientAI) -> None: agent = client.agents.update_status( path_uuid="uuid", ) @@ -266,7 +266,7 @@ def test_method_update_status(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_update_status_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update_status_with_all_params(self, client: GradientAI) -> None: agent = client.agents.update_status( path_uuid="uuid", body_uuid="uuid", @@ -276,7 +276,7 @@ def 
test_method_update_status_with_all_params(self, client: DigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - def test_raw_response_update_status(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_update_status(self, client: GradientAI) -> None: response = client.agents.with_raw_response.update_status( path_uuid="uuid", ) @@ -288,7 +288,7 @@ def test_raw_response_update_status(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update_status(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_update_status(self, client: GradientAI) -> None: with client.agents.with_streaming_response.update_status( path_uuid="uuid", ) as response: @@ -302,7 +302,7 @@ def test_streaming_response_update_status(self, client: DigitaloceanGenaiSDK) -> @pytest.mark.skip() @parametrize - def test_path_params_update_status(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_update_status(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): client.agents.with_raw_response.update_status( path_uuid="", @@ -310,17 +310,19 @@ def test_path_params_update_status(self, client: DigitaloceanGenaiSDK) -> None: class TestAsyncAgents: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_create(self, async_client: AsyncGradientAI) -> None: agent = await async_client.agents.create() assert_matches_type(AgentCreateResponse, agent, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: agent = await async_client.agents.create( anthropic_key_uuid="anthropic_key_uuid", description="description", @@ -337,7 +339,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncDigitaloce @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.with_raw_response.create() assert response.is_closed is True @@ -347,7 +349,7 @@ async def test_raw_response_create(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -359,7 +361,7 @@ async def test_streaming_response_create(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: agent = await async_client.agents.retrieve( "uuid", ) @@ -367,7 +369,7 @@ async def test_method_retrieve(self, 
async_client: AsyncDigitaloceanGenaiSDK) -> @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.with_raw_response.retrieve( "uuid", ) @@ -379,7 +381,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncDigitaloceanGenaiS @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.with_streaming_response.retrieve( "uuid", ) as response: @@ -393,7 +395,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncDigitalocean @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): await async_client.agents.with_raw_response.retrieve( "", @@ -401,7 +403,7 @@ async def test_path_params_retrieve(self, async_client: AsyncDigitaloceanGenaiSD @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_update(self, async_client: AsyncGradientAI) -> None: agent = await async_client.agents.update( path_uuid="uuid", ) @@ -409,7 +411,7 @@ async def test_method_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: agent = await async_client.agents.update( path_uuid="uuid", anthropic_key_uuid="anthropic_key_uuid", @@ -432,7 +434,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncDigitaloce @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.with_raw_response.update( path_uuid="uuid", ) @@ -444,7 +446,7 @@ async def test_raw_response_update(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.with_streaming_response.update( path_uuid="uuid", ) as response: @@ -458,7 +460,7 @@ async def test_streaming_response_update(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): await async_client.agents.with_raw_response.update( path_uuid="", @@ -466,13 +468,13 @@ async def test_path_params_update(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list(self, 
async_client: AsyncGradientAI) -> None: agent = await async_client.agents.list() assert_matches_type(AgentListResponse, agent, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: agent = await async_client.agents.list( only_deployed=True, page=0, @@ -482,7 +484,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncDigitalocean @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.with_raw_response.list() assert response.is_closed is True @@ -492,7 +494,7 @@ async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -504,7 +506,7 @@ async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGena @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_delete(self, async_client: AsyncGradientAI) -> None: agent = await async_client.agents.delete( "uuid", ) @@ -512,7 +514,7 @@ async def test_method_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.with_raw_response.delete( "uuid", ) @@ -524,7 +526,7 @@ async def test_raw_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.with_streaming_response.delete( "uuid", ) as response: @@ -538,7 +540,7 @@ async def test_streaming_response_delete(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): await async_client.agents.with_raw_response.delete( "", @@ -546,7 +548,7 @@ async def test_path_params_delete(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_method_update_status(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_update_status(self, async_client: AsyncGradientAI) -> None: agent = await async_client.agents.update_status( path_uuid="uuid", ) @@ -554,7 +556,7 @@ async def test_method_update_status(self, async_client: AsyncDigitaloceanGenaiSD @pytest.mark.skip() @parametrize - async def test_method_update_status_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) 
-> None: + async def test_method_update_status_with_all_params(self, async_client: AsyncGradientAI) -> None: agent = await async_client.agents.update_status( path_uuid="uuid", body_uuid="uuid", @@ -564,7 +566,7 @@ async def test_method_update_status_with_all_params(self, async_client: AsyncDig @pytest.mark.skip() @parametrize - async def test_raw_response_update_status(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_update_status(self, async_client: AsyncGradientAI) -> None: response = await async_client.agents.with_raw_response.update_status( path_uuid="uuid", ) @@ -576,7 +578,7 @@ async def test_raw_response_update_status(self, async_client: AsyncDigitaloceanG @pytest.mark.skip() @parametrize - async def test_streaming_response_update_status(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_update_status(self, async_client: AsyncGradientAI) -> None: async with async_client.agents.with_streaming_response.update_status( path_uuid="uuid", ) as response: @@ -590,7 +592,7 @@ async def test_streaming_response_update_status(self, async_client: AsyncDigital @pytest.mark.skip() @parametrize - async def test_path_params_update_status(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_update_status(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): await async_client.agents.with_raw_response.update_status( path_uuid="", diff --git a/tests/api_resources/test_api_keys.py b/tests/api_resources/test_api_keys.py deleted file mode 100644 index 198eb261..00000000 --- a/tests/api_resources/test_api_keys.py +++ /dev/null @@ -1,100 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
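
The api_keys tests deleted here still import the old digitalocean_genai_sdk package, while the files kept or added in this diff import gradientai and point at the same local mock server. A rough before/after sketch of the rename (authentication and the base_url keyword are assumptions about the client constructor, not shown in this diff):

import os

# Old surface, removed throughout this diff:
#   from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK

# New surface used by the updated tests:
from gradientai import GradientAI, AsyncGradientAI

# The suites default to a local mock server unless TEST_API_BASE_URL overrides it.
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
client = GradientAI(base_url=base_url)  # API key handling assumed to match the project's conftest
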
- -from __future__ import annotations - -import os -from typing import Any, cast - -import pytest - -from tests.utils import assert_matches_type -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk.types import APIKeyListResponse - -base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") - - -class TestAPIKeys: - parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - - @pytest.mark.skip() - @parametrize - def test_method_list(self, client: DigitaloceanGenaiSDK) -> None: - api_key = client.api_keys.list() - assert_matches_type(APIKeyListResponse, api_key, path=["response"]) - - @pytest.mark.skip() - @parametrize - def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: - api_key = client.api_keys.list( - page=0, - per_page=0, - public_only=True, - usecases=["MODEL_USECASE_UNKNOWN"], - ) - assert_matches_type(APIKeyListResponse, api_key, path=["response"]) - - @pytest.mark.skip() - @parametrize - def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: - response = client.api_keys.with_raw_response.list() - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - api_key = response.parse() - assert_matches_type(APIKeyListResponse, api_key, path=["response"]) - - @pytest.mark.skip() - @parametrize - def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: - with client.api_keys.with_streaming_response.list() as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - api_key = response.parse() - assert_matches_type(APIKeyListResponse, api_key, path=["response"]) - - assert cast(Any, response.is_closed) is True - - -class TestAsyncAPIKeys: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) - - @pytest.mark.skip() - @parametrize - async def test_method_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - api_key = await async_client.api_keys.list() - assert_matches_type(APIKeyListResponse, api_key, path=["response"]) - - @pytest.mark.skip() - @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - api_key = await async_client.api_keys.list( - page=0, - per_page=0, - public_only=True, - usecases=["MODEL_USECASE_UNKNOWN"], - ) - assert_matches_type(APIKeyListResponse, api_key, path=["response"]) - - @pytest.mark.skip() - @parametrize - async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - response = await async_client.api_keys.with_raw_response.list() - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - api_key = await response.parse() - assert_matches_type(APIKeyListResponse, api_key, path=["response"]) - - @pytest.mark.skip() - @parametrize - async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - async with async_client.api_keys.with_streaming_response.list() as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - api_key = await response.parse() - assert_matches_type(APIKeyListResponse, api_key, path=["response"]) - - assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/test_embeddings.py b/tests/api_resources/test_embeddings.py 
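
Both the embeddings tests deleted below and the suites retained above exercise the same response wrappers: .with_raw_response returns an already-read response that is parsed on demand, while .with_streaming_response is a context manager that keeps the body unread until parse() and closes it on exit. A usage sketch built only from the attributes those tests assert on (client setup assumed):

from gradientai import GradientAI

client = GradientAI()  # configuration assumed, as in the test fixtures

# Raw access: the response body has already been read; parse() returns the typed model.
raw = client.agents.with_raw_response.list()
assert raw.is_closed is True
print(raw.http_request.headers.get("X-Stainless-Lang"))  # "python"
agents = raw.parse()

# Streaming access: the body stays open inside the block and is closed on exit.
with client.agents.with_streaming_response.list() as streamed:
    assert streamed.is_closed is False
    agents = streamed.parse()
assert streamed.is_closed is True
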
deleted file mode 100644 index ea1b5879..00000000 --- a/tests/api_resources/test_embeddings.py +++ /dev/null @@ -1,116 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -import os -from typing import Any, cast - -import pytest - -from tests.utils import assert_matches_type -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk.types import EmbeddingCreateResponse - -base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") - - -class TestEmbeddings: - parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - - @pytest.mark.skip() - @parametrize - def test_method_create(self, client: DigitaloceanGenaiSDK) -> None: - embedding = client.embeddings.create( - input="The quick brown fox jumped over the lazy dog", - model="text-embedding-3-small", - ) - assert_matches_type(EmbeddingCreateResponse, embedding, path=["response"]) - - @pytest.mark.skip() - @parametrize - def test_method_create_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: - embedding = client.embeddings.create( - input="The quick brown fox jumped over the lazy dog", - model="text-embedding-3-small", - user="user-1234", - ) - assert_matches_type(EmbeddingCreateResponse, embedding, path=["response"]) - - @pytest.mark.skip() - @parametrize - def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: - response = client.embeddings.with_raw_response.create( - input="The quick brown fox jumped over the lazy dog", - model="text-embedding-3-small", - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - embedding = response.parse() - assert_matches_type(EmbeddingCreateResponse, embedding, path=["response"]) - - @pytest.mark.skip() - @parametrize - def test_streaming_response_create(self, client: DigitaloceanGenaiSDK) -> None: - with client.embeddings.with_streaming_response.create( - input="The quick brown fox jumped over the lazy dog", - model="text-embedding-3-small", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - embedding = response.parse() - assert_matches_type(EmbeddingCreateResponse, embedding, path=["response"]) - - assert cast(Any, response.is_closed) is True - - -class TestAsyncEmbeddings: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) - - @pytest.mark.skip() - @parametrize - async def test_method_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - embedding = await async_client.embeddings.create( - input="The quick brown fox jumped over the lazy dog", - model="text-embedding-3-small", - ) - assert_matches_type(EmbeddingCreateResponse, embedding, path=["response"]) - - @pytest.mark.skip() - @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - embedding = await async_client.embeddings.create( - input="The quick brown fox jumped over the lazy dog", - model="text-embedding-3-small", - user="user-1234", - ) - assert_matches_type(EmbeddingCreateResponse, embedding, path=["response"]) - - @pytest.mark.skip() - @parametrize - async def test_raw_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - response = await async_client.embeddings.with_raw_response.create( - input="The quick brown fox jumped over the 
lazy dog", - model="text-embedding-3-small", - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - embedding = await response.parse() - assert_matches_type(EmbeddingCreateResponse, embedding, path=["response"]) - - @pytest.mark.skip() - @parametrize - async def test_streaming_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - async with async_client.embeddings.with_streaming_response.create( - input="The quick brown fox jumped over the lazy dog", - model="text-embedding-3-small", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - embedding = await response.parse() - assert_matches_type(EmbeddingCreateResponse, embedding, path=["response"]) - - assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/test_indexing_jobs.py b/tests/api_resources/test_indexing_jobs.py index 9ae7ec50..6a50d9b5 100644 --- a/tests/api_resources/test_indexing_jobs.py +++ b/tests/api_resources/test_indexing_jobs.py @@ -7,9 +7,9 @@ import pytest +from gradientai import GradientAI, AsyncGradientAI from tests.utils import assert_matches_type -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk.types import ( +from gradientai.types import ( IndexingJobListResponse, IndexingJobCreateResponse, IndexingJobRetrieveResponse, @@ -25,13 +25,13 @@ class TestIndexingJobs: @pytest.mark.skip() @parametrize - def test_method_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_create(self, client: GradientAI) -> None: indexing_job = client.indexing_jobs.create() assert_matches_type(IndexingJobCreateResponse, indexing_job, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_create_with_all_params(self, client: GradientAI) -> None: indexing_job = client.indexing_jobs.create( data_source_uuids=["string"], knowledge_base_uuid="knowledge_base_uuid", @@ -40,7 +40,7 @@ def test_method_create_with_all_params(self, client: DigitaloceanGenaiSDK) -> No @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_create(self, client: GradientAI) -> None: response = client.indexing_jobs.with_raw_response.create() assert response.is_closed is True @@ -50,7 +50,7 @@ def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_create(self, client: GradientAI) -> None: with client.indexing_jobs.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -62,7 +62,7 @@ def test_streaming_response_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_retrieve(self, client: GradientAI) -> None: indexing_job = client.indexing_jobs.retrieve( "uuid", ) @@ -70,7 +70,7 @@ def test_method_retrieve(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_retrieve(self, client: GradientAI) -> None: response = 
client.indexing_jobs.with_raw_response.retrieve( "uuid", ) @@ -82,7 +82,7 @@ def test_raw_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_retrieve(self, client: GradientAI) -> None: with client.indexing_jobs.with_streaming_response.retrieve( "uuid", ) as response: @@ -96,7 +96,7 @@ def test_streaming_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_retrieve(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): client.indexing_jobs.with_raw_response.retrieve( "", @@ -104,13 +104,13 @@ def test_path_params_retrieve(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list(self, client: GradientAI) -> None: indexing_job = client.indexing_jobs.list() assert_matches_type(IndexingJobListResponse, indexing_job, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list_with_all_params(self, client: GradientAI) -> None: indexing_job = client.indexing_jobs.list( page=0, per_page=0, @@ -119,7 +119,7 @@ def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_list(self, client: GradientAI) -> None: response = client.indexing_jobs.with_raw_response.list() assert response.is_closed is True @@ -129,7 +129,7 @@ def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_list(self, client: GradientAI) -> None: with client.indexing_jobs.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -141,7 +141,7 @@ def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve_data_sources(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_retrieve_data_sources(self, client: GradientAI) -> None: indexing_job = client.indexing_jobs.retrieve_data_sources( "indexing_job_uuid", ) @@ -149,7 +149,7 @@ def test_method_retrieve_data_sources(self, client: DigitaloceanGenaiSDK) -> Non @pytest.mark.skip() @parametrize - def test_raw_response_retrieve_data_sources(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_retrieve_data_sources(self, client: GradientAI) -> None: response = client.indexing_jobs.with_raw_response.retrieve_data_sources( "indexing_job_uuid", ) @@ -161,7 +161,7 @@ def test_raw_response_retrieve_data_sources(self, client: DigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve_data_sources(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_retrieve_data_sources(self, client: GradientAI) -> None: with client.indexing_jobs.with_streaming_response.retrieve_data_sources( "indexing_job_uuid", ) as response: @@ -175,7 +175,7 @@ def 
test_streaming_response_retrieve_data_sources(self, client: DigitaloceanGena @pytest.mark.skip() @parametrize - def test_path_params_retrieve_data_sources(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_retrieve_data_sources(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `indexing_job_uuid` but received ''"): client.indexing_jobs.with_raw_response.retrieve_data_sources( "", @@ -183,7 +183,7 @@ def test_path_params_retrieve_data_sources(self, client: DigitaloceanGenaiSDK) - @pytest.mark.skip() @parametrize - def test_method_update_cancel(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update_cancel(self, client: GradientAI) -> None: indexing_job = client.indexing_jobs.update_cancel( path_uuid="uuid", ) @@ -191,7 +191,7 @@ def test_method_update_cancel(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_update_cancel_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update_cancel_with_all_params(self, client: GradientAI) -> None: indexing_job = client.indexing_jobs.update_cancel( path_uuid="uuid", body_uuid="uuid", @@ -200,7 +200,7 @@ def test_method_update_cancel_with_all_params(self, client: DigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - def test_raw_response_update_cancel(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_update_cancel(self, client: GradientAI) -> None: response = client.indexing_jobs.with_raw_response.update_cancel( path_uuid="uuid", ) @@ -212,7 +212,7 @@ def test_raw_response_update_cancel(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update_cancel(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_update_cancel(self, client: GradientAI) -> None: with client.indexing_jobs.with_streaming_response.update_cancel( path_uuid="uuid", ) as response: @@ -226,7 +226,7 @@ def test_streaming_response_update_cancel(self, client: DigitaloceanGenaiSDK) -> @pytest.mark.skip() @parametrize - def test_path_params_update_cancel(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_update_cancel(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): client.indexing_jobs.with_raw_response.update_cancel( path_uuid="", @@ -234,17 +234,19 @@ def test_path_params_update_cancel(self, client: DigitaloceanGenaiSDK) -> None: class TestAsyncIndexingJobs: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_create(self, async_client: AsyncGradientAI) -> None: indexing_job = await async_client.indexing_jobs.create() assert_matches_type(IndexingJobCreateResponse, indexing_job, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: indexing_job = await async_client.indexing_jobs.create( data_source_uuids=["string"], knowledge_base_uuid="knowledge_base_uuid", @@ -253,7 +255,7 @@ async def 
test_method_create_with_all_params(self, async_client: AsyncDigitaloce @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: response = await async_client.indexing_jobs.with_raw_response.create() assert response.is_closed is True @@ -263,7 +265,7 @@ async def test_raw_response_create(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: async with async_client.indexing_jobs.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -275,7 +277,7 @@ async def test_streaming_response_create(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: indexing_job = await async_client.indexing_jobs.retrieve( "uuid", ) @@ -283,7 +285,7 @@ async def test_method_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: response = await async_client.indexing_jobs.with_raw_response.retrieve( "uuid", ) @@ -295,7 +297,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncDigitaloceanGenaiS @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: async with async_client.indexing_jobs.with_streaming_response.retrieve( "uuid", ) as response: @@ -309,7 +311,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncDigitalocean @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): await async_client.indexing_jobs.with_raw_response.retrieve( "", @@ -317,13 +319,13 @@ async def test_path_params_retrieve(self, async_client: AsyncDigitaloceanGenaiSD @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list(self, async_client: AsyncGradientAI) -> None: indexing_job = await async_client.indexing_jobs.list() assert_matches_type(IndexingJobListResponse, indexing_job, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: indexing_job = await async_client.indexing_jobs.list( page=0, per_page=0, @@ -332,7 +334,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncDigitalocean @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_list(self, 
async_client: AsyncGradientAI) -> None: response = await async_client.indexing_jobs.with_raw_response.list() assert response.is_closed is True @@ -342,7 +344,7 @@ async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: async with async_client.indexing_jobs.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -354,7 +356,7 @@ async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGena @pytest.mark.skip() @parametrize - async def test_method_retrieve_data_sources(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_retrieve_data_sources(self, async_client: AsyncGradientAI) -> None: indexing_job = await async_client.indexing_jobs.retrieve_data_sources( "indexing_job_uuid", ) @@ -362,7 +364,7 @@ async def test_method_retrieve_data_sources(self, async_client: AsyncDigitalocea @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve_data_sources(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_retrieve_data_sources(self, async_client: AsyncGradientAI) -> None: response = await async_client.indexing_jobs.with_raw_response.retrieve_data_sources( "indexing_job_uuid", ) @@ -374,7 +376,7 @@ async def test_raw_response_retrieve_data_sources(self, async_client: AsyncDigit @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve_data_sources(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_retrieve_data_sources(self, async_client: AsyncGradientAI) -> None: async with async_client.indexing_jobs.with_streaming_response.retrieve_data_sources( "indexing_job_uuid", ) as response: @@ -388,7 +390,7 @@ async def test_streaming_response_retrieve_data_sources(self, async_client: Asyn @pytest.mark.skip() @parametrize - async def test_path_params_retrieve_data_sources(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_retrieve_data_sources(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `indexing_job_uuid` but received ''"): await async_client.indexing_jobs.with_raw_response.retrieve_data_sources( "", @@ -396,7 +398,7 @@ async def test_path_params_retrieve_data_sources(self, async_client: AsyncDigita @pytest.mark.skip() @parametrize - async def test_method_update_cancel(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_update_cancel(self, async_client: AsyncGradientAI) -> None: indexing_job = await async_client.indexing_jobs.update_cancel( path_uuid="uuid", ) @@ -404,7 +406,7 @@ async def test_method_update_cancel(self, async_client: AsyncDigitaloceanGenaiSD @pytest.mark.skip() @parametrize - async def test_method_update_cancel_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_update_cancel_with_all_params(self, async_client: AsyncGradientAI) -> None: indexing_job = await async_client.indexing_jobs.update_cancel( path_uuid="uuid", body_uuid="uuid", @@ -413,7 +415,7 @@ async def test_method_update_cancel_with_all_params(self, async_client: AsyncDig @pytest.mark.skip() @parametrize - async def test_raw_response_update_cancel(self, async_client: 
AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_update_cancel(self, async_client: AsyncGradientAI) -> None: response = await async_client.indexing_jobs.with_raw_response.update_cancel( path_uuid="uuid", ) @@ -425,7 +427,7 @@ async def test_raw_response_update_cancel(self, async_client: AsyncDigitaloceanG @pytest.mark.skip() @parametrize - async def test_streaming_response_update_cancel(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_update_cancel(self, async_client: AsyncGradientAI) -> None: async with async_client.indexing_jobs.with_streaming_response.update_cancel( path_uuid="uuid", ) as response: @@ -439,7 +441,7 @@ async def test_streaming_response_update_cancel(self, async_client: AsyncDigital @pytest.mark.skip() @parametrize - async def test_path_params_update_cancel(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_update_cancel(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): await async_client.indexing_jobs.with_raw_response.update_cancel( path_uuid="", diff --git a/tests/api_resources/test_knowledge_bases.py b/tests/api_resources/test_knowledge_bases.py index 34e3d753..508820ce 100644 --- a/tests/api_resources/test_knowledge_bases.py +++ b/tests/api_resources/test_knowledge_bases.py @@ -7,9 +7,9 @@ import pytest +from gradientai import GradientAI, AsyncGradientAI from tests.utils import assert_matches_type -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk.types import ( +from gradientai.types import ( KnowledgeBaseListResponse, KnowledgeBaseCreateResponse, KnowledgeBaseDeleteResponse, @@ -25,17 +25,24 @@ class TestKnowledgeBases: @pytest.mark.skip() @parametrize - def test_method_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_create(self, client: GradientAI) -> None: knowledge_base = client.knowledge_bases.create() assert_matches_type(KnowledgeBaseCreateResponse, knowledge_base, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_create_with_all_params(self, client: GradientAI) -> None: knowledge_base = client.knowledge_bases.create( database_id="database_id", datasources=[ { + "aws_data_source": { + "bucket_name": "bucket_name", + "item_path": "item_path", + "key_id": "key_id", + "region": "region", + "secret_key": "secret_key", + }, "bucket_name": "bucket_name", "bucket_region": "bucket_region", "file_upload_data_source": { @@ -67,7 +74,7 @@ def test_method_create_with_all_params(self, client: DigitaloceanGenaiSDK) -> No @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_create(self, client: GradientAI) -> None: response = client.knowledge_bases.with_raw_response.create() assert response.is_closed is True @@ -77,7 +84,7 @@ def test_raw_response_create(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_create(self, client: GradientAI) -> None: with client.knowledge_bases.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -89,7 +96,7 @@ def test_streaming_response_create(self, client: 
DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_retrieve(self, client: GradientAI) -> None: knowledge_base = client.knowledge_bases.retrieve( "uuid", ) @@ -97,7 +104,7 @@ def test_method_retrieve(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_retrieve(self, client: GradientAI) -> None: response = client.knowledge_bases.with_raw_response.retrieve( "uuid", ) @@ -109,7 +116,7 @@ def test_raw_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_retrieve(self, client: GradientAI) -> None: with client.knowledge_bases.with_streaming_response.retrieve( "uuid", ) as response: @@ -123,7 +130,7 @@ def test_streaming_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_retrieve(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): client.knowledge_bases.with_raw_response.retrieve( "", @@ -131,7 +138,7 @@ def test_path_params_retrieve(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update(self, client: GradientAI) -> None: knowledge_base = client.knowledge_bases.update( path_uuid="uuid", ) @@ -139,7 +146,7 @@ def test_method_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_update_with_all_params(self, client: GradientAI) -> None: knowledge_base = client.knowledge_bases.update( path_uuid="uuid", database_id="database_id", @@ -153,7 +160,7 @@ def test_method_update_with_all_params(self, client: DigitaloceanGenaiSDK) -> No @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_update(self, client: GradientAI) -> None: response = client.knowledge_bases.with_raw_response.update( path_uuid="uuid", ) @@ -165,7 +172,7 @@ def test_raw_response_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_update(self, client: GradientAI) -> None: with client.knowledge_bases.with_streaming_response.update( path_uuid="uuid", ) as response: @@ -179,7 +186,7 @@ def test_streaming_response_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_update(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): client.knowledge_bases.with_raw_response.update( path_uuid="", @@ -187,13 +194,13 @@ def test_path_params_update(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list(self, client: GradientAI) -> None: knowledge_base = 
client.knowledge_bases.list() assert_matches_type(KnowledgeBaseListResponse, knowledge_base, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list_with_all_params(self, client: GradientAI) -> None: knowledge_base = client.knowledge_bases.list( page=0, per_page=0, @@ -202,7 +209,7 @@ def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_list(self, client: GradientAI) -> None: response = client.knowledge_bases.with_raw_response.list() assert response.is_closed is True @@ -212,7 +219,7 @@ def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_list(self, client: GradientAI) -> None: with client.knowledge_bases.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -224,7 +231,7 @@ def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_delete(self, client: GradientAI) -> None: knowledge_base = client.knowledge_bases.delete( "uuid", ) @@ -232,7 +239,7 @@ def test_method_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_delete(self, client: GradientAI) -> None: response = client.knowledge_bases.with_raw_response.delete( "uuid", ) @@ -244,7 +251,7 @@ def test_raw_response_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_delete(self, client: GradientAI) -> None: with client.knowledge_bases.with_streaming_response.delete( "uuid", ) as response: @@ -258,7 +265,7 @@ def test_streaming_response_delete(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: DigitaloceanGenaiSDK) -> None: + def test_path_params_delete(self, client: GradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): client.knowledge_bases.with_raw_response.delete( "", @@ -266,21 +273,30 @@ def test_path_params_delete(self, client: DigitaloceanGenaiSDK) -> None: class TestAsyncKnowledgeBases: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_create(self, async_client: AsyncGradientAI) -> None: knowledge_base = await async_client.knowledge_bases.create() assert_matches_type(KnowledgeBaseCreateResponse, knowledge_base, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_create_with_all_params(self, 
async_client: AsyncGradientAI) -> None: knowledge_base = await async_client.knowledge_bases.create( database_id="database_id", datasources=[ { + "aws_data_source": { + "bucket_name": "bucket_name", + "item_path": "item_path", + "key_id": "key_id", + "region": "region", + "secret_key": "secret_key", + }, "bucket_name": "bucket_name", "bucket_region": "bucket_region", "file_upload_data_source": { @@ -312,7 +328,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncDigitaloce @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: response = await async_client.knowledge_bases.with_raw_response.create() assert response.is_closed is True @@ -322,7 +338,7 @@ async def test_raw_response_create(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: async with async_client.knowledge_bases.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -334,7 +350,7 @@ async def test_streaming_response_create(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: knowledge_base = await async_client.knowledge_bases.retrieve( "uuid", ) @@ -342,7 +358,7 @@ async def test_method_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: response = await async_client.knowledge_bases.with_raw_response.retrieve( "uuid", ) @@ -354,7 +370,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncDigitaloceanGenaiS @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: async with async_client.knowledge_bases.with_streaming_response.retrieve( "uuid", ) as response: @@ -368,7 +384,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncDigitalocean @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): await async_client.knowledge_bases.with_raw_response.retrieve( "", @@ -376,7 +392,7 @@ async def test_path_params_retrieve(self, async_client: AsyncDigitaloceanGenaiSD @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_update(self, async_client: AsyncGradientAI) -> None: knowledge_base = await async_client.knowledge_bases.update( path_uuid="uuid", ) @@ -384,7 +400,7 @@ async def test_method_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def 
test_method_update_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: knowledge_base = await async_client.knowledge_bases.update( path_uuid="uuid", database_id="database_id", @@ -398,7 +414,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncDigitaloce @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: response = await async_client.knowledge_bases.with_raw_response.update( path_uuid="uuid", ) @@ -410,7 +426,7 @@ async def test_raw_response_update(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: async with async_client.knowledge_bases.with_streaming_response.update( path_uuid="uuid", ) as response: @@ -424,7 +440,7 @@ async def test_streaming_response_update(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): await async_client.knowledge_bases.with_raw_response.update( path_uuid="", @@ -432,13 +448,13 @@ async def test_path_params_update(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list(self, async_client: AsyncGradientAI) -> None: knowledge_base = await async_client.knowledge_bases.list() assert_matches_type(KnowledgeBaseListResponse, knowledge_base, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: knowledge_base = await async_client.knowledge_bases.list( page=0, per_page=0, @@ -447,7 +463,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncDigitalocean @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: response = await async_client.knowledge_bases.with_raw_response.list() assert response.is_closed is True @@ -457,7 +473,7 @@ async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: async with async_client.knowledge_bases.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -469,7 +485,7 @@ async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGena @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_delete(self, async_client: AsyncGradientAI) -> None: 
knowledge_base = await async_client.knowledge_bases.delete( "uuid", ) @@ -477,7 +493,7 @@ async def test_method_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> N @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: response = await async_client.knowledge_bases.with_raw_response.delete( "uuid", ) @@ -489,7 +505,7 @@ async def test_raw_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: async with async_client.knowledge_bases.with_streaming_response.delete( "uuid", ) as response: @@ -503,7 +519,7 @@ async def test_streaming_response_delete(self, async_client: AsyncDigitaloceanGe @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): await async_client.knowledge_bases.with_raw_response.delete( "", diff --git a/tests/api_resources/test_models.py b/tests/api_resources/test_models.py index 1148affb..5e119f71 100644 --- a/tests/api_resources/test_models.py +++ b/tests/api_resources/test_models.py @@ -7,9 +7,9 @@ import pytest +from gradientai import GradientAI, AsyncGradientAI from tests.utils import assert_matches_type -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk.types import Model, ModelListResponse +from gradientai.types import ModelListResponse base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -19,55 +19,24 @@ class TestModels: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: DigitaloceanGenaiSDK) -> None: - model = client.models.retrieve( - "llama3-8b-instruct", - ) - assert_matches_type(Model, model, path=["response"]) + def test_method_list(self, client: GradientAI) -> None: + model = client.models.list() + assert_matches_type(ModelListResponse, model, path=["response"]) @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None: - response = client.models.with_raw_response.retrieve( - "llama3-8b-instruct", + def test_method_list_with_all_params(self, client: GradientAI) -> None: + model = client.models.list( + page=0, + per_page=0, + public_only=True, + usecases=["MODEL_USECASE_UNKNOWN"], ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - model = response.parse() - assert_matches_type(Model, model, path=["response"]) - - @pytest.mark.skip() - @parametrize - def test_streaming_response_retrieve(self, client: DigitaloceanGenaiSDK) -> None: - with client.models.with_streaming_response.retrieve( - "llama3-8b-instruct", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - model = response.parse() - assert_matches_type(Model, model, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @pytest.mark.skip() - @parametrize - def test_path_params_retrieve(self, client: DigitaloceanGenaiSDK) -> None: - with pytest.raises(ValueError, 
match=r"Expected a non-empty value for `model` but received ''"): - client.models.with_raw_response.retrieve( - "", - ) - - @pytest.mark.skip() - @parametrize - def test_method_list(self, client: DigitaloceanGenaiSDK) -> None: - model = client.models.list() assert_matches_type(ModelListResponse, model, path=["response"]) @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_list(self, client: GradientAI) -> None: response = client.models.with_raw_response.list() assert response.is_closed is True @@ -77,7 +46,7 @@ def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_list(self, client: GradientAI) -> None: with client.models.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -89,59 +58,30 @@ def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: class TestAsyncModels: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - model = await async_client.models.retrieve( - "llama3-8b-instruct", - ) - assert_matches_type(Model, model, path=["response"]) + async def test_method_list(self, async_client: AsyncGradientAI) -> None: + model = await async_client.models.list() + assert_matches_type(ModelListResponse, model, path=["response"]) @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - response = await async_client.models.with_raw_response.retrieve( - "llama3-8b-instruct", + async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + model = await async_client.models.list( + page=0, + per_page=0, + public_only=True, + usecases=["MODEL_USECASE_UNKNOWN"], ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - model = await response.parse() - assert_matches_type(Model, model, path=["response"]) - - @pytest.mark.skip() - @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - async with async_client.models.with_streaming_response.retrieve( - "llama3-8b-instruct", - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - model = await response.parse() - assert_matches_type(Model, model, path=["response"]) - - assert cast(Any, response.is_closed) is True - - @pytest.mark.skip() - @parametrize - async def test_path_params_retrieve(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - with pytest.raises(ValueError, match=r"Expected a non-empty value for `model` but received ''"): - await async_client.models.with_raw_response.retrieve( - "", - ) - - @pytest.mark.skip() - @parametrize - async def test_method_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: - model = await async_client.models.list() assert_matches_type(ModelListResponse, model, path=["response"]) @pytest.mark.skip() @parametrize - async def 
test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: response = await async_client.models.with_raw_response.list() assert response.is_closed is True @@ -151,7 +91,7 @@ async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: async with async_client.models.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" diff --git a/tests/api_resources/test_regions.py b/tests/api_resources/test_regions.py index f36b6c63..9cb24b0a 100644 --- a/tests/api_resources/test_regions.py +++ b/tests/api_resources/test_regions.py @@ -7,9 +7,9 @@ import pytest +from gradientai import GradientAI, AsyncGradientAI from tests.utils import assert_matches_type -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk.types import RegionListResponse +from gradientai.types import RegionListResponse, RegionListEvaluationMetricsResponse base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -19,13 +19,13 @@ class TestRegions: @pytest.mark.skip() @parametrize - def test_method_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list(self, client: GradientAI) -> None: region = client.regions.list() assert_matches_type(RegionListResponse, region, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None: + def test_method_list_with_all_params(self, client: GradientAI) -> None: region = client.regions.list( serves_batch=True, serves_inference=True, @@ -34,7 +34,7 @@ def test_method_list_with_all_params(self, client: DigitaloceanGenaiSDK) -> None @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_raw_response_list(self, client: GradientAI) -> None: response = client.regions.with_raw_response.list() assert response.is_closed is True @@ -44,7 +44,7 @@ def test_raw_response_list(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: + def test_streaming_response_list(self, client: GradientAI) -> None: with client.regions.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -54,19 +54,49 @@ def test_streaming_response_list(self, client: DigitaloceanGenaiSDK) -> None: assert cast(Any, response.is_closed) is True + @pytest.mark.skip() + @parametrize + def test_method_list_evaluation_metrics(self, client: GradientAI) -> None: + region = client.regions.list_evaluation_metrics() + assert_matches_type(RegionListEvaluationMetricsResponse, region, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_list_evaluation_metrics(self, client: GradientAI) -> None: + response = client.regions.with_raw_response.list_evaluation_metrics() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + region = response.parse() + assert_matches_type(RegionListEvaluationMetricsResponse, region, path=["response"]) + 
+ @pytest.mark.skip() + @parametrize + def test_streaming_response_list_evaluation_metrics(self, client: GradientAI) -> None: + with client.regions.with_streaming_response.list_evaluation_metrics() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + region = response.parse() + assert_matches_type(RegionListEvaluationMetricsResponse, region, path=["response"]) + + assert cast(Any, response.is_closed) is True + class TestAsyncRegions: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list(self, async_client: AsyncGradientAI) -> None: region = await async_client.regions.list() assert_matches_type(RegionListResponse, region, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: region = await async_client.regions.list( serves_batch=True, serves_inference=True, @@ -75,7 +105,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncDigitalocean @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: response = await async_client.regions.with_raw_response.list() assert response.is_closed is True @@ -85,7 +115,7 @@ async def test_raw_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: async with async_client.regions.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -94,3 +124,31 @@ async def test_streaming_response_list(self, async_client: AsyncDigitaloceanGena assert_matches_type(RegionListResponse, region, path=["response"]) assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_method_list_evaluation_metrics(self, async_client: AsyncGradientAI) -> None: + region = await async_client.regions.list_evaluation_metrics() + assert_matches_type(RegionListEvaluationMetricsResponse, region, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_list_evaluation_metrics(self, async_client: AsyncGradientAI) -> None: + response = await async_client.regions.with_raw_response.list_evaluation_metrics() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + region = await response.parse() + assert_matches_type(RegionListEvaluationMetricsResponse, region, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_list_evaluation_metrics(self, async_client: AsyncGradientAI) -> None: + async with async_client.regions.with_streaming_response.list_evaluation_metrics() as response: + assert not response.is_closed + assert 
response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+            region = await response.parse()
+            assert_matches_type(RegionListEvaluationMetricsResponse, region, path=["response"])
+
+        assert cast(Any, response.is_closed) is True
diff --git a/tests/conftest.py b/tests/conftest.py
index abd9aa51..23079a7e 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,20 +1,24 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
 from __future__ import annotations

 import os
 import logging
 from typing import TYPE_CHECKING, Iterator, AsyncIterator

+import httpx
 import pytest
 from pytest_asyncio import is_async_test

-from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK
+from gradientai import GradientAI, AsyncGradientAI, DefaultAioHttpClient
+from gradientai._utils import is_dict

 if TYPE_CHECKING:
     from _pytest.fixtures import FixtureRequest  # pyright: ignore[reportPrivateImportUsage]

 pytest.register_assert_rewrite("tests.utils")

-logging.getLogger("digitalocean_genai_sdk").setLevel(logging.DEBUG)
+logging.getLogger("gradientai").setLevel(logging.DEBUG)


 # automatically add `pytest.mark.asyncio()` to all of our async tests
@@ -25,6 +29,19 @@ def pytest_collection_modifyitems(items: list[pytest.Function]) -> None:
     for async_test in pytest_asyncio_tests:
         async_test.add_marker(session_scope_marker, append=False)

+    # We skip tests that use both the aiohttp client and respx_mock as respx_mock
+    # doesn't support custom transports.
+    for item in items:
+        if "async_client" not in item.fixturenames or "respx_mock" not in item.fixturenames:
+            continue
+
+        if not hasattr(item, "callspec"):
+            continue
+
+        async_client_param = item.callspec.params.get("async_client")
+        if is_dict(async_client_param) and async_client_param.get("http_client") == "aiohttp":
+            item.add_marker(pytest.mark.skip(reason="aiohttp client is not compatible with respx_mock"))
+

 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")

@@ -32,22 +49,36 @@ def pytest_collection_modifyitems(items: list[pytest.Function]) -> None:


 @pytest.fixture(scope="session")
-def client(request: FixtureRequest) -> Iterator[DigitaloceanGenaiSDK]:
+def client(request: FixtureRequest) -> Iterator[GradientAI]:
     strict = getattr(request, "param", True)
     if not isinstance(strict, bool):
         raise TypeError(f"Unexpected fixture parameter type {type(strict)}, expected {bool}")

-    with DigitaloceanGenaiSDK(base_url=base_url, api_key=api_key, _strict_response_validation=strict) as client:
+    with GradientAI(base_url=base_url, api_key=api_key, _strict_response_validation=strict) as client:
         yield client


 @pytest.fixture(scope="session")
-async def async_client(request: FixtureRequest) -> AsyncIterator[AsyncDigitaloceanGenaiSDK]:
-    strict = getattr(request, "param", True)
-    if not isinstance(strict, bool):
-        raise TypeError(f"Unexpected fixture parameter type {type(strict)}, expected {bool}")
-
-    async with AsyncDigitaloceanGenaiSDK(
-        base_url=base_url, api_key=api_key, _strict_response_validation=strict
+async def async_client(request: FixtureRequest) -> AsyncIterator[AsyncGradientAI]:
+    param = getattr(request, "param", True)
+
+    # defaults
+    strict = True
+    http_client: None | httpx.AsyncClient = None
+
+    if isinstance(param, bool):
+        strict = param
+    elif is_dict(param):
+        strict = param.get("strict", True)
+        assert isinstance(strict, bool)
+
+        http_client_type = param.get("http_client", "httpx")
+        if http_client_type == "aiohttp":
+            http_client = DefaultAioHttpClient()
+    else:
+        raise TypeError(f"Unexpected fixture parameter type {type(param)}, expected bool or dict")
+
+    async with AsyncGradientAI(
+        base_url=base_url, api_key=api_key, _strict_response_validation=strict, http_client=http_client
     ) as client:
         yield client
diff --git a/tests/test_client.py b/tests/test_client.py
index d6412ded..f19a5edb 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -21,20 +21,16 @@
 from respx import MockRouter
 from pydantic import ValidationError

-from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK, APIResponseValidationError
-from digitalocean_genai_sdk._types import Omit
-from digitalocean_genai_sdk._models import BaseModel, FinalRequestOptions
-from digitalocean_genai_sdk._constants import RAW_RESPONSE_HEADER
-from digitalocean_genai_sdk._exceptions import (
-    APIStatusError,
-    APITimeoutError,
-    DigitaloceanGenaiSDKError,
-    APIResponseValidationError,
-)
-from digitalocean_genai_sdk._base_client import (
+from gradientai import GradientAI, AsyncGradientAI, APIResponseValidationError
+from gradientai._types import Omit
+from gradientai._models import BaseModel, FinalRequestOptions
+from gradientai._exceptions import APIStatusError, APITimeoutError, GradientAIError, APIResponseValidationError
+from gradientai._base_client import (
     DEFAULT_TIMEOUT,
     HTTPX_DEFAULT_TIMEOUT,
     BaseClient,
+    DefaultHttpxClient,
+    DefaultAsyncHttpxClient,
     make_request_options,
 )

@@ -54,7 +50,7 @@ def _low_retry_timeout(*_args: Any, **_kwargs: Any) -> float:
     return 0.1


-def _get_open_connections(client: DigitaloceanGenaiSDK | AsyncDigitaloceanGenaiSDK) -> int:
+def _get_open_connections(client: GradientAI | AsyncGradientAI) -> int:
     transport = client._client._transport
     assert isinstance(transport, httpx.HTTPTransport) or isinstance(transport, httpx.AsyncHTTPTransport)

@@ -62,8 +58,8 @@ def _get_open_connections(client: DigitaloceanGenaiS
     return len(pool._requests)


-class TestDigitaloceanGenaiSDK:
-    client = DigitaloceanGenaiSDK(base_url=base_url, api_key=api_key, _strict_response_validation=True)
+class TestGradientAI:
+    client = GradientAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)

     @pytest.mark.respx(base_url=base_url)
     def test_raw_response(self, respx_mock: MockRouter) -> None:
@@ -110,7 +106,7 @@ def test_copy_default_options(self) -> None:
         assert isinstance(self.client.timeout, httpx.Timeout)

     def test_copy_default_headers(self) -> None:
-        client = DigitaloceanGenaiSDK(
+        client = GradientAI(
             base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"}
         )
         assert client.default_headers["X-Foo"] == "bar"
@@ -144,7 +140,7 @@ def test_copy_default_headers(self) -> None:
             client.copy(set_default_headers={}, default_headers={"X-Foo": "Bar"})

     def test_copy_default_query(self) -> None:
-        client = DigitaloceanGenaiSDK(
+        client = GradientAI(
             base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"foo": "bar"}
         )
         assert _get_params(client)["foo"] == "bar"
@@ -235,10 +231,10 @@ def add_leak(leaks: list[tracemalloc.StatisticDiff], diff: tracemalloc.Statistic
                     # to_raw_response_wrapper leaks through the @functools.wraps() decorator.
                     #
                     # removing the decorator fixes the leak for reasons we don't understand.
-                    "digitalocean_genai_sdk/_legacy_response.py",
-                    "digitalocean_genai_sdk/_response.py",
+                    "gradientai/_legacy_response.py",
+                    "gradientai/_response.py",
                     # pydantic.BaseModel.model_dump || pydantic.BaseModel.dict leak memory for some reason.
- "digitalocean_genai_sdk/_compat.py", + "gradientai/_compat.py", # Standard library leaks we don't care about. "/logging/__init__.py", ] @@ -269,7 +265,7 @@ def test_request_timeout(self) -> None: assert timeout == httpx.Timeout(100.0) def test_client_timeout_option(self) -> None: - client = DigitaloceanGenaiSDK( + client = GradientAI( base_url=base_url, api_key=api_key, _strict_response_validation=True, timeout=httpx.Timeout(0) ) @@ -280,7 +276,7 @@ def test_client_timeout_option(self) -> None: def test_http_client_timeout_option(self) -> None: # custom timeout given to the httpx client should be used with httpx.Client(timeout=None) as http_client: - client = DigitaloceanGenaiSDK( + client = GradientAI( base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client ) @@ -290,7 +286,7 @@ def test_http_client_timeout_option(self) -> None: # no timeout given to the httpx client should not use the httpx default with httpx.Client() as http_client: - client = DigitaloceanGenaiSDK( + client = GradientAI( base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client ) @@ -300,7 +296,7 @@ def test_http_client_timeout_option(self) -> None: # explicitly passing the default timeout currently results in it being ignored with httpx.Client(timeout=HTTPX_DEFAULT_TIMEOUT) as http_client: - client = DigitaloceanGenaiSDK( + client = GradientAI( base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client ) @@ -311,7 +307,7 @@ def test_http_client_timeout_option(self) -> None: async def test_invalid_http_client(self) -> None: with pytest.raises(TypeError, match="Invalid `http_client` arg"): async with httpx.AsyncClient() as http_client: - DigitaloceanGenaiSDK( + GradientAI( base_url=base_url, api_key=api_key, _strict_response_validation=True, @@ -319,14 +315,14 @@ async def test_invalid_http_client(self) -> None: ) def test_default_headers_option(self) -> None: - client = DigitaloceanGenaiSDK( + client = GradientAI( base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"} ) request = client._build_request(FinalRequestOptions(method="get", url="/foo")) assert request.headers.get("x-foo") == "bar" assert request.headers.get("x-stainless-lang") == "python" - client2 = DigitaloceanGenaiSDK( + client2 = GradientAI( base_url=base_url, api_key=api_key, _strict_response_validation=True, @@ -340,17 +336,17 @@ def test_default_headers_option(self) -> None: assert request.headers.get("x-stainless-lang") == "my-overriding-header" def test_validate_headers(self) -> None: - client = DigitaloceanGenaiSDK(base_url=base_url, api_key=api_key, _strict_response_validation=True) + client = GradientAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) request = client._build_request(FinalRequestOptions(method="get", url="/foo")) assert request.headers.get("Authorization") == f"Bearer {api_key}" - with pytest.raises(DigitaloceanGenaiSDKError): - with update_env(**{"DIGITALOCEAN_GENAI_SDK_API_KEY": Omit()}): - client2 = DigitaloceanGenaiSDK(base_url=base_url, api_key=None, _strict_response_validation=True) + with pytest.raises(GradientAIError): + with update_env(**{"GRADIENTAI_API_KEY": Omit()}): + client2 = GradientAI(base_url=base_url, api_key=None, _strict_response_validation=True) _ = client2 def test_default_query_option(self) -> None: - client = DigitaloceanGenaiSDK( + client = GradientAI( base_url=base_url, api_key=api_key, _strict_response_validation=True, 
default_query={"query_param": "bar"} ) request = client._build_request(FinalRequestOptions(method="get", url="/foo")) @@ -464,7 +460,7 @@ def test_request_extra_query(self) -> None: params = dict(request.url.params) assert params == {"foo": "2"} - def test_multipart_repeating_array(self, client: DigitaloceanGenaiSDK) -> None: + def test_multipart_repeating_array(self, client: GradientAI) -> None: request = client._build_request( FinalRequestOptions.construct( method="get", @@ -551,9 +547,7 @@ class Model(BaseModel): assert response.foo == 2 def test_base_url_setter(self) -> None: - client = DigitaloceanGenaiSDK( - base_url="https://example.com/from_init", api_key=api_key, _strict_response_validation=True - ) + client = GradientAI(base_url="https://example.com/from_init", api_key=api_key, _strict_response_validation=True) assert client.base_url == "https://example.com/from_init/" client.base_url = "https://example.com/from_setter" # type: ignore[assignment] @@ -561,17 +555,17 @@ def test_base_url_setter(self) -> None: assert client.base_url == "https://example.com/from_setter/" def test_base_url_env(self) -> None: - with update_env(DIGITALOCEAN_GENAI_SDK_BASE_URL="http://localhost:5000/from/env"): - client = DigitaloceanGenaiSDK(api_key=api_key, _strict_response_validation=True) + with update_env(GRADIENT_AI_BASE_URL="http://localhost:5000/from/env"): + client = GradientAI(api_key=api_key, _strict_response_validation=True) assert client.base_url == "http://localhost:5000/from/env/" @pytest.mark.parametrize( "client", [ - DigitaloceanGenaiSDK( + GradientAI( base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True ), - DigitaloceanGenaiSDK( + GradientAI( base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True, @@ -580,7 +574,7 @@ def test_base_url_env(self) -> None: ], ids=["standard", "custom http client"], ) - def test_base_url_trailing_slash(self, client: DigitaloceanGenaiSDK) -> None: + def test_base_url_trailing_slash(self, client: GradientAI) -> None: request = client._build_request( FinalRequestOptions( method="post", @@ -593,10 +587,10 @@ def test_base_url_trailing_slash(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.parametrize( "client", [ - DigitaloceanGenaiSDK( + GradientAI( base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True ), - DigitaloceanGenaiSDK( + GradientAI( base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True, @@ -605,7 +599,7 @@ def test_base_url_trailing_slash(self, client: DigitaloceanGenaiSDK) -> None: ], ids=["standard", "custom http client"], ) - def test_base_url_no_trailing_slash(self, client: DigitaloceanGenaiSDK) -> None: + def test_base_url_no_trailing_slash(self, client: GradientAI) -> None: request = client._build_request( FinalRequestOptions( method="post", @@ -618,10 +612,10 @@ def test_base_url_no_trailing_slash(self, client: DigitaloceanGenaiSDK) -> None: @pytest.mark.parametrize( "client", [ - DigitaloceanGenaiSDK( + GradientAI( base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True ), - DigitaloceanGenaiSDK( + GradientAI( base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True, @@ -630,7 +624,7 @@ def test_base_url_no_trailing_slash(self, client: DigitaloceanGenaiSDK) -> None: ], ids=["standard", "custom http client"], ) - def test_absolute_request_url(self, client: DigitaloceanGenaiSDK) -> 
None: + def test_absolute_request_url(self, client: GradientAI) -> None: request = client._build_request( FinalRequestOptions( method="post", @@ -641,7 +635,7 @@ def test_absolute_request_url(self, client: DigitaloceanGenaiSDK) -> None: assert request.url == "https://myapi.com/foo" def test_copied_client_does_not_close_http(self) -> None: - client = DigitaloceanGenaiSDK(base_url=base_url, api_key=api_key, _strict_response_validation=True) + client = GradientAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) assert not client.is_closed() copied = client.copy() @@ -652,7 +646,7 @@ def test_copied_client_does_not_close_http(self) -> None: assert not client.is_closed() def test_client_context_manager(self) -> None: - client = DigitaloceanGenaiSDK(base_url=base_url, api_key=api_key, _strict_response_validation=True) + client = GradientAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) with client as c2: assert c2 is client assert not c2.is_closed() @@ -673,7 +667,7 @@ class Model(BaseModel): def test_client_max_retries_validation(self) -> None: with pytest.raises(TypeError, match=r"max_retries cannot be None"): - DigitaloceanGenaiSDK( + GradientAI( base_url=base_url, api_key=api_key, _strict_response_validation=True, max_retries=cast(Any, None) ) @@ -684,12 +678,12 @@ class Model(BaseModel): respx_mock.get("/foo").mock(return_value=httpx.Response(200, text="my-custom-format")) - strict_client = DigitaloceanGenaiSDK(base_url=base_url, api_key=api_key, _strict_response_validation=True) + strict_client = GradientAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) with pytest.raises(APIResponseValidationError): strict_client.get("/foo", cast_to=Model) - client = DigitaloceanGenaiSDK(base_url=base_url, api_key=api_key, _strict_response_validation=False) + client = GradientAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) response = client.get("/foo", cast_to=Model) assert isinstance(response, str) # type: ignore[unreachable] @@ -717,48 +711,39 @@ class Model(BaseModel): ) @mock.patch("time.time", mock.MagicMock(return_value=1696004797)) def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str, timeout: float) -> None: - client = DigitaloceanGenaiSDK(base_url=base_url, api_key=api_key, _strict_response_validation=True) + client = GradientAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) headers = httpx.Headers({"retry-after": retry_after}) options = FinalRequestOptions(method="get", url="/foo", max_retries=3) calculated = client._calculate_retry_timeout(remaining_retries, options, headers) assert calculated == pytest.approx(timeout, 0.5 * 0.875) # pyright: ignore[reportUnknownMemberType] - @mock.patch("digitalocean_genai_sdk._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @mock.patch("gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) - def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter) -> None: + def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter, client: GradientAI) -> None: respx_mock.get("/v2/gen-ai/agents/uuid/versions").mock(side_effect=httpx.TimeoutException("Test timeout error")) with pytest.raises(APITimeoutError): - self.client.get( - "/v2/gen-ai/agents/uuid/versions", - cast_to=httpx.Response, - options={"headers": {RAW_RESPONSE_HEADER: "stream"}}, - ) + 
client.agents.versions.with_streaming_response.list(uuid="uuid").__enter__() assert _get_open_connections(self.client) == 0 - @mock.patch("digitalocean_genai_sdk._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @mock.patch("gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) - def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter) -> None: + def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, client: GradientAI) -> None: respx_mock.get("/v2/gen-ai/agents/uuid/versions").mock(return_value=httpx.Response(500)) with pytest.raises(APIStatusError): - self.client.get( - "/v2/gen-ai/agents/uuid/versions", - cast_to=httpx.Response, - options={"headers": {RAW_RESPONSE_HEADER: "stream"}}, - ) - + client.agents.versions.with_streaming_response.list(uuid="uuid").__enter__() assert _get_open_connections(self.client) == 0 @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) - @mock.patch("digitalocean_genai_sdk._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @mock.patch("gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) @pytest.mark.parametrize("failure_mode", ["status", "exception"]) def test_retries_taken( self, - client: DigitaloceanGenaiSDK, + client: GradientAI, failures_before_success: int, failure_mode: Literal["status", "exception"], respx_mock: MockRouter, @@ -784,10 +769,10 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) - @mock.patch("digitalocean_genai_sdk._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @mock.patch("gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) def test_omit_retry_count_header( - self, client: DigitaloceanGenaiSDK, failures_before_success: int, respx_mock: MockRouter + self, client: GradientAI, failures_before_success: int, respx_mock: MockRouter ) -> None: client = client.with_options(max_retries=4) @@ -809,10 +794,10 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) - @mock.patch("digitalocean_genai_sdk._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @mock.patch("gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) def test_overwrite_retry_count_header( - self, client: DigitaloceanGenaiSDK, failures_before_success: int, respx_mock: MockRouter + self, client: GradientAI, failures_before_success: int, respx_mock: MockRouter ) -> None: client = client.with_options(max_retries=4) @@ -833,6 +818,28 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: assert response.http_request.headers.get("x-stainless-retry-count") == "42" + def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None: + # Test that the proxy environment variables are set correctly + monkeypatch.setenv("HTTPS_PROXY", "https://example.org") + + client = DefaultHttpxClient() + + mounts = tuple(client._mounts.items()) + assert len(mounts) == 1 + assert mounts[0][0].pattern == "https://" + + 
@pytest.mark.filterwarnings("ignore:.*deprecated.*:DeprecationWarning") + def test_default_client_creation(self) -> None: + # Ensure that the client can be initialized without any exceptions + DefaultHttpxClient( + verify=True, + cert=None, + trust_env=True, + http1=True, + http2=False, + limits=httpx.Limits(max_connections=100, max_keepalive_connections=20), + ) + @pytest.mark.respx(base_url=base_url) def test_follow_redirects(self, respx_mock: MockRouter) -> None: # Test that the default follow_redirects=True allows following redirects @@ -861,8 +868,8 @@ def test_follow_redirects_disabled(self, respx_mock: MockRouter) -> None: assert exc_info.value.response.headers["Location"] == f"{base_url}/redirected" -class TestAsyncDigitaloceanGenaiSDK: - client = AsyncDigitaloceanGenaiSDK(base_url=base_url, api_key=api_key, _strict_response_validation=True) +class TestAsyncGradientAI: + client = AsyncGradientAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) @pytest.mark.respx(base_url=base_url) @pytest.mark.asyncio @@ -911,7 +918,7 @@ def test_copy_default_options(self) -> None: assert isinstance(self.client.timeout, httpx.Timeout) def test_copy_default_headers(self) -> None: - client = AsyncDigitaloceanGenaiSDK( + client = AsyncGradientAI( base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"} ) assert client.default_headers["X-Foo"] == "bar" @@ -945,7 +952,7 @@ def test_copy_default_headers(self) -> None: client.copy(set_default_headers={}, default_headers={"X-Foo": "Bar"}) def test_copy_default_query(self) -> None: - client = AsyncDigitaloceanGenaiSDK( + client = AsyncGradientAI( base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"foo": "bar"} ) assert _get_params(client)["foo"] == "bar" @@ -1036,10 +1043,10 @@ def add_leak(leaks: list[tracemalloc.StatisticDiff], diff: tracemalloc.Statistic # to_raw_response_wrapper leaks through the @functools.wraps() decorator. # # removing the decorator fixes the leak for reasons we don't understand. - "digitalocean_genai_sdk/_legacy_response.py", - "digitalocean_genai_sdk/_response.py", + "gradientai/_legacy_response.py", + "gradientai/_response.py", # pydantic.BaseModel.model_dump || pydantic.BaseModel.dict leak memory for some reason. - "digitalocean_genai_sdk/_compat.py", + "gradientai/_compat.py", # Standard library leaks we don't care about. 
"/logging/__init__.py", ] @@ -1070,7 +1077,7 @@ async def test_request_timeout(self) -> None: assert timeout == httpx.Timeout(100.0) async def test_client_timeout_option(self) -> None: - client = AsyncDigitaloceanGenaiSDK( + client = AsyncGradientAI( base_url=base_url, api_key=api_key, _strict_response_validation=True, timeout=httpx.Timeout(0) ) @@ -1081,7 +1088,7 @@ async def test_client_timeout_option(self) -> None: async def test_http_client_timeout_option(self) -> None: # custom timeout given to the httpx client should be used async with httpx.AsyncClient(timeout=None) as http_client: - client = AsyncDigitaloceanGenaiSDK( + client = AsyncGradientAI( base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client ) @@ -1091,7 +1098,7 @@ async def test_http_client_timeout_option(self) -> None: # no timeout given to the httpx client should not use the httpx default async with httpx.AsyncClient() as http_client: - client = AsyncDigitaloceanGenaiSDK( + client = AsyncGradientAI( base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client ) @@ -1101,7 +1108,7 @@ async def test_http_client_timeout_option(self) -> None: # explicitly passing the default timeout currently results in it being ignored async with httpx.AsyncClient(timeout=HTTPX_DEFAULT_TIMEOUT) as http_client: - client = AsyncDigitaloceanGenaiSDK( + client = AsyncGradientAI( base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client ) @@ -1112,7 +1119,7 @@ async def test_http_client_timeout_option(self) -> None: def test_invalid_http_client(self) -> None: with pytest.raises(TypeError, match="Invalid `http_client` arg"): with httpx.Client() as http_client: - AsyncDigitaloceanGenaiSDK( + AsyncGradientAI( base_url=base_url, api_key=api_key, _strict_response_validation=True, @@ -1120,14 +1127,14 @@ def test_invalid_http_client(self) -> None: ) def test_default_headers_option(self) -> None: - client = AsyncDigitaloceanGenaiSDK( + client = AsyncGradientAI( base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"} ) request = client._build_request(FinalRequestOptions(method="get", url="/foo")) assert request.headers.get("x-foo") == "bar" assert request.headers.get("x-stainless-lang") == "python" - client2 = AsyncDigitaloceanGenaiSDK( + client2 = AsyncGradientAI( base_url=base_url, api_key=api_key, _strict_response_validation=True, @@ -1141,17 +1148,17 @@ def test_default_headers_option(self) -> None: assert request.headers.get("x-stainless-lang") == "my-overriding-header" def test_validate_headers(self) -> None: - client = AsyncDigitaloceanGenaiSDK(base_url=base_url, api_key=api_key, _strict_response_validation=True) + client = AsyncGradientAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) request = client._build_request(FinalRequestOptions(method="get", url="/foo")) assert request.headers.get("Authorization") == f"Bearer {api_key}" - with pytest.raises(DigitaloceanGenaiSDKError): - with update_env(**{"DIGITALOCEAN_GENAI_SDK_API_KEY": Omit()}): - client2 = AsyncDigitaloceanGenaiSDK(base_url=base_url, api_key=None, _strict_response_validation=True) + with pytest.raises(GradientAIError): + with update_env(**{"GRADIENTAI_API_KEY": Omit()}): + client2 = AsyncGradientAI(base_url=base_url, api_key=None, _strict_response_validation=True) _ = client2 def test_default_query_option(self) -> None: - client = AsyncDigitaloceanGenaiSDK( + client = AsyncGradientAI( base_url=base_url, 
api_key=api_key, _strict_response_validation=True, default_query={"query_param": "bar"} ) request = client._build_request(FinalRequestOptions(method="get", url="/foo")) @@ -1265,7 +1272,7 @@ def test_request_extra_query(self) -> None: params = dict(request.url.params) assert params == {"foo": "2"} - def test_multipart_repeating_array(self, async_client: AsyncDigitaloceanGenaiSDK) -> None: + def test_multipart_repeating_array(self, async_client: AsyncGradientAI) -> None: request = async_client._build_request( FinalRequestOptions.construct( method="get", @@ -1352,7 +1359,7 @@ class Model(BaseModel): assert response.foo == 2 def test_base_url_setter(self) -> None: - client = AsyncDigitaloceanGenaiSDK( + client = AsyncGradientAI( base_url="https://example.com/from_init", api_key=api_key, _strict_response_validation=True ) assert client.base_url == "https://example.com/from_init/" @@ -1362,17 +1369,17 @@ def test_base_url_setter(self) -> None: assert client.base_url == "https://example.com/from_setter/" def test_base_url_env(self) -> None: - with update_env(DIGITALOCEAN_GENAI_SDK_BASE_URL="http://localhost:5000/from/env"): - client = AsyncDigitaloceanGenaiSDK(api_key=api_key, _strict_response_validation=True) + with update_env(GRADIENT_AI_BASE_URL="http://localhost:5000/from/env"): + client = AsyncGradientAI(api_key=api_key, _strict_response_validation=True) assert client.base_url == "http://localhost:5000/from/env/" @pytest.mark.parametrize( "client", [ - AsyncDigitaloceanGenaiSDK( + AsyncGradientAI( base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True ), - AsyncDigitaloceanGenaiSDK( + AsyncGradientAI( base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True, @@ -1381,7 +1388,7 @@ def test_base_url_env(self) -> None: ], ids=["standard", "custom http client"], ) - def test_base_url_trailing_slash(self, client: AsyncDigitaloceanGenaiSDK) -> None: + def test_base_url_trailing_slash(self, client: AsyncGradientAI) -> None: request = client._build_request( FinalRequestOptions( method="post", @@ -1394,10 +1401,10 @@ def test_base_url_trailing_slash(self, client: AsyncDigitaloceanGenaiSDK) -> Non @pytest.mark.parametrize( "client", [ - AsyncDigitaloceanGenaiSDK( + AsyncGradientAI( base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True ), - AsyncDigitaloceanGenaiSDK( + AsyncGradientAI( base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True, @@ -1406,7 +1413,7 @@ def test_base_url_trailing_slash(self, client: AsyncDigitaloceanGenaiSDK) -> Non ], ids=["standard", "custom http client"], ) - def test_base_url_no_trailing_slash(self, client: AsyncDigitaloceanGenaiSDK) -> None: + def test_base_url_no_trailing_slash(self, client: AsyncGradientAI) -> None: request = client._build_request( FinalRequestOptions( method="post", @@ -1419,10 +1426,10 @@ def test_base_url_no_trailing_slash(self, client: AsyncDigitaloceanGenaiSDK) -> @pytest.mark.parametrize( "client", [ - AsyncDigitaloceanGenaiSDK( + AsyncGradientAI( base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True ), - AsyncDigitaloceanGenaiSDK( + AsyncGradientAI( base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True, @@ -1431,7 +1438,7 @@ def test_base_url_no_trailing_slash(self, client: AsyncDigitaloceanGenaiSDK) -> ], ids=["standard", "custom http client"], ) - def test_absolute_request_url(self, client: 
AsyncDigitaloceanGenaiSDK) -> None: + def test_absolute_request_url(self, client: AsyncGradientAI) -> None: request = client._build_request( FinalRequestOptions( method="post", @@ -1442,7 +1449,7 @@ def test_absolute_request_url(self, client: AsyncDigitaloceanGenaiSDK) -> None: assert request.url == "https://myapi.com/foo" async def test_copied_client_does_not_close_http(self) -> None: - client = AsyncDigitaloceanGenaiSDK(base_url=base_url, api_key=api_key, _strict_response_validation=True) + client = AsyncGradientAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) assert not client.is_closed() copied = client.copy() @@ -1454,7 +1461,7 @@ async def test_copied_client_does_not_close_http(self) -> None: assert not client.is_closed() async def test_client_context_manager(self) -> None: - client = AsyncDigitaloceanGenaiSDK(base_url=base_url, api_key=api_key, _strict_response_validation=True) + client = AsyncGradientAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) async with client as c2: assert c2 is client assert not c2.is_closed() @@ -1476,7 +1483,7 @@ class Model(BaseModel): async def test_client_max_retries_validation(self) -> None: with pytest.raises(TypeError, match=r"max_retries cannot be None"): - AsyncDigitaloceanGenaiSDK( + AsyncGradientAI( base_url=base_url, api_key=api_key, _strict_response_validation=True, max_retries=cast(Any, None) ) @@ -1488,12 +1495,12 @@ class Model(BaseModel): respx_mock.get("/foo").mock(return_value=httpx.Response(200, text="my-custom-format")) - strict_client = AsyncDigitaloceanGenaiSDK(base_url=base_url, api_key=api_key, _strict_response_validation=True) + strict_client = AsyncGradientAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) with pytest.raises(APIResponseValidationError): await strict_client.get("/foo", cast_to=Model) - client = AsyncDigitaloceanGenaiSDK(base_url=base_url, api_key=api_key, _strict_response_validation=False) + client = AsyncGradientAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) response = await client.get("/foo", cast_to=Model) assert isinstance(response, str) # type: ignore[unreachable] @@ -1522,49 +1529,44 @@ class Model(BaseModel): @mock.patch("time.time", mock.MagicMock(return_value=1696004797)) @pytest.mark.asyncio async def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str, timeout: float) -> None: - client = AsyncDigitaloceanGenaiSDK(base_url=base_url, api_key=api_key, _strict_response_validation=True) + client = AsyncGradientAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) headers = httpx.Headers({"retry-after": retry_after}) options = FinalRequestOptions(method="get", url="/foo", max_retries=3) calculated = client._calculate_retry_timeout(remaining_retries, options, headers) assert calculated == pytest.approx(timeout, 0.5 * 0.875) # pyright: ignore[reportUnknownMemberType] - @mock.patch("digitalocean_genai_sdk._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @mock.patch("gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) - async def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter) -> None: + async def test_retrying_timeout_errors_doesnt_leak( + self, respx_mock: MockRouter, async_client: AsyncGradientAI + ) -> None: respx_mock.get("/v2/gen-ai/agents/uuid/versions").mock(side_effect=httpx.TimeoutException("Test timeout error")) with pytest.raises(APITimeoutError): - 
await self.client.get( - "/v2/gen-ai/agents/uuid/versions", - cast_to=httpx.Response, - options={"headers": {RAW_RESPONSE_HEADER: "stream"}}, - ) + await async_client.agents.versions.with_streaming_response.list(uuid="uuid").__aenter__() assert _get_open_connections(self.client) == 0 - @mock.patch("digitalocean_genai_sdk._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @mock.patch("gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) - async def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter) -> None: + async def test_retrying_status_errors_doesnt_leak( + self, respx_mock: MockRouter, async_client: AsyncGradientAI + ) -> None: respx_mock.get("/v2/gen-ai/agents/uuid/versions").mock(return_value=httpx.Response(500)) with pytest.raises(APIStatusError): - await self.client.get( - "/v2/gen-ai/agents/uuid/versions", - cast_to=httpx.Response, - options={"headers": {RAW_RESPONSE_HEADER: "stream"}}, - ) - + await async_client.agents.versions.with_streaming_response.list(uuid="uuid").__aenter__() assert _get_open_connections(self.client) == 0 @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) - @mock.patch("digitalocean_genai_sdk._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @mock.patch("gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) @pytest.mark.asyncio @pytest.mark.parametrize("failure_mode", ["status", "exception"]) async def test_retries_taken( self, - async_client: AsyncDigitaloceanGenaiSDK, + async_client: AsyncGradientAI, failures_before_success: int, failure_mode: Literal["status", "exception"], respx_mock: MockRouter, @@ -1590,11 +1592,11 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) - @mock.patch("digitalocean_genai_sdk._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @mock.patch("gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) @pytest.mark.asyncio async def test_omit_retry_count_header( - self, async_client: AsyncDigitaloceanGenaiSDK, failures_before_success: int, respx_mock: MockRouter + self, async_client: AsyncGradientAI, failures_before_success: int, respx_mock: MockRouter ) -> None: client = async_client.with_options(max_retries=4) @@ -1616,11 +1618,11 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) - @mock.patch("digitalocean_genai_sdk._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @mock.patch("gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) @pytest.mark.asyncio async def test_overwrite_retry_count_header( - self, async_client: AsyncDigitaloceanGenaiSDK, failures_before_success: int, respx_mock: MockRouter + self, async_client: AsyncGradientAI, failures_before_success: int, respx_mock: MockRouter ) -> None: client = async_client.with_options(max_retries=4) @@ -1652,8 +1654,8 @@ def test_get_platform(self) -> None: import nest_asyncio import threading - from digitalocean_genai_sdk._utils import asyncify - from 
digitalocean_genai_sdk._base_client import get_platform + from gradientai._utils import asyncify + from gradientai._base_client import get_platform async def test_main() -> None: result = await asyncify(get_platform)() @@ -1686,6 +1688,28 @@ async def test_main() -> None: time.sleep(0.1) + async def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None: + # Test that the proxy environment variables are set correctly + monkeypatch.setenv("HTTPS_PROXY", "https://example.org") + + client = DefaultAsyncHttpxClient() + + mounts = tuple(client._mounts.items()) + assert len(mounts) == 1 + assert mounts[0][0].pattern == "https://" + + @pytest.mark.filterwarnings("ignore:.*deprecated.*:DeprecationWarning") + async def test_default_client_creation(self) -> None: + # Ensure that the client can be initialized without any exceptions + DefaultAsyncHttpxClient( + verify=True, + cert=None, + trust_env=True, + http1=True, + http2=False, + limits=httpx.Limits(max_connections=100, max_keepalive_connections=20), + ) + @pytest.mark.respx(base_url=base_url) async def test_follow_redirects(self, respx_mock: MockRouter) -> None: # Test that the default follow_redirects=True allows following redirects diff --git a/tests/test_deepcopy.py b/tests/test_deepcopy.py index 317130ef..9d1579a8 100644 --- a/tests/test_deepcopy.py +++ b/tests/test_deepcopy.py @@ -1,4 +1,4 @@ -from digitalocean_genai_sdk._utils import deepcopy_minimal +from gradientai._utils import deepcopy_minimal def assert_different_identities(obj1: object, obj2: object) -> None: diff --git a/tests/test_extract_files.py b/tests/test_extract_files.py index aad87e09..2905d59c 100644 --- a/tests/test_extract_files.py +++ b/tests/test_extract_files.py @@ -4,8 +4,8 @@ import pytest -from digitalocean_genai_sdk._types import FileTypes -from digitalocean_genai_sdk._utils import extract_files +from gradientai._types import FileTypes +from gradientai._utils import extract_files def test_removes_files_from_input() -> None: diff --git a/tests/test_files.py b/tests/test_files.py index f3a07ce0..4a723313 100644 --- a/tests/test_files.py +++ b/tests/test_files.py @@ -4,7 +4,7 @@ import pytest from dirty_equals import IsDict, IsList, IsBytes, IsTuple -from digitalocean_genai_sdk._files import to_httpx_files, async_to_httpx_files +from gradientai._files import to_httpx_files, async_to_httpx_files readme_path = Path(__file__).parent.parent.joinpath("README.md") diff --git a/tests/test_models.py b/tests/test_models.py index 0be34866..28aff1f3 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -7,9 +7,9 @@ import pydantic from pydantic import Field -from digitalocean_genai_sdk._utils import PropertyInfo -from digitalocean_genai_sdk._compat import PYDANTIC_V2, parse_obj, model_dump, model_json -from digitalocean_genai_sdk._models import BaseModel, construct_type +from gradientai._utils import PropertyInfo +from gradientai._compat import PYDANTIC_V2, parse_obj, model_dump, model_json +from gradientai._models import BaseModel, construct_type class BasicModel(BaseModel): diff --git a/tests/test_qs.py b/tests/test_qs.py index 41824698..9080377b 100644 --- a/tests/test_qs.py +++ b/tests/test_qs.py @@ -4,7 +4,7 @@ import pytest -from digitalocean_genai_sdk._qs import Querystring, stringify +from gradientai._qs import Querystring, stringify def test_empty() -> None: diff --git a/tests/test_required_args.py b/tests/test_required_args.py index 379ac794..c4e6b9d8 100644 --- a/tests/test_required_args.py +++ b/tests/test_required_args.py @@ -2,7 
+2,7 @@ import pytest -from digitalocean_genai_sdk._utils import required_args +from gradientai._utils import required_args def test_too_many_positional_params() -> None: diff --git a/tests/test_response.py b/tests/test_response.py index 768537aa..1a8f241e 100644 --- a/tests/test_response.py +++ b/tests/test_response.py @@ -6,8 +6,8 @@ import pytest import pydantic -from digitalocean_genai_sdk import BaseModel, DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk._response import ( +from gradientai import BaseModel, GradientAI, AsyncGradientAI +from gradientai._response import ( APIResponse, BaseAPIResponse, AsyncAPIResponse, @@ -15,8 +15,8 @@ AsyncBinaryAPIResponse, extract_response_type, ) -from digitalocean_genai_sdk._streaming import Stream -from digitalocean_genai_sdk._base_client import FinalRequestOptions +from gradientai._streaming import Stream +from gradientai._base_client import FinalRequestOptions class ConcreteBaseAPIResponse(APIResponse[bytes]): ... @@ -37,7 +37,7 @@ def test_extract_response_type_direct_classes() -> None: def test_extract_response_type_direct_class_missing_type_arg() -> None: with pytest.raises( RuntimeError, - match="Expected type to have a type argument at index 0 but it did not", + match="Expected type to have a type argument at index 0 but it did not", ): extract_response_type(AsyncAPIResponse) @@ -56,7 +56,7 @@ def test_extract_response_type_binary_response() -> None: class PydanticModel(pydantic.BaseModel): ... -def test_response_parse_mismatched_basemodel(client: DigitaloceanGenaiSDK) -> None: +def test_response_parse_mismatched_basemodel(client: GradientAI) -> None: response = APIResponse( raw=httpx.Response(200, content=b"foo"), client=client, @@ -68,13 +68,13 @@ def test_response_parse_mismatched_basemodel(client: DigitaloceanGenaiSDK) -> No with pytest.raises( TypeError, - match="Pydantic models must subclass our base model type, e.g. `from digitalocean_genai_sdk import BaseModel`", + match="Pydantic models must subclass our base model type, e.g. `from gradientai import BaseModel`", ): response.parse(to=PydanticModel) @pytest.mark.asyncio -async def test_async_response_parse_mismatched_basemodel(async_client: AsyncDigitaloceanGenaiSDK) -> None: +async def test_async_response_parse_mismatched_basemodel(async_client: AsyncGradientAI) -> None: response = AsyncAPIResponse( raw=httpx.Response(200, content=b"foo"), client=async_client, @@ -86,12 +86,12 @@ async def test_async_response_parse_mismatched_basemodel(async_client: AsyncDigi with pytest.raises( TypeError, - match="Pydantic models must subclass our base model type, e.g. `from digitalocean_genai_sdk import BaseModel`", + match="Pydantic models must subclass our base model type, e.g. 
`from gradientai import BaseModel`", ): await response.parse(to=PydanticModel) -def test_response_parse_custom_stream(client: DigitaloceanGenaiSDK) -> None: +def test_response_parse_custom_stream(client: GradientAI) -> None: response = APIResponse( raw=httpx.Response(200, content=b"foo"), client=client, @@ -106,7 +106,7 @@ def test_response_parse_custom_stream(client: DigitaloceanGenaiSDK) -> None: @pytest.mark.asyncio -async def test_async_response_parse_custom_stream(async_client: AsyncDigitaloceanGenaiSDK) -> None: +async def test_async_response_parse_custom_stream(async_client: AsyncGradientAI) -> None: response = AsyncAPIResponse( raw=httpx.Response(200, content=b"foo"), client=async_client, @@ -125,7 +125,7 @@ class CustomModel(BaseModel): bar: int -def test_response_parse_custom_model(client: DigitaloceanGenaiSDK) -> None: +def test_response_parse_custom_model(client: GradientAI) -> None: response = APIResponse( raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), client=client, @@ -141,7 +141,7 @@ def test_response_parse_custom_model(client: DigitaloceanGenaiSDK) -> None: @pytest.mark.asyncio -async def test_async_response_parse_custom_model(async_client: AsyncDigitaloceanGenaiSDK) -> None: +async def test_async_response_parse_custom_model(async_client: AsyncGradientAI) -> None: response = AsyncAPIResponse( raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), client=async_client, @@ -156,7 +156,7 @@ async def test_async_response_parse_custom_model(async_client: AsyncDigitalocean assert obj.bar == 2 -def test_response_parse_annotated_type(client: DigitaloceanGenaiSDK) -> None: +def test_response_parse_annotated_type(client: GradientAI) -> None: response = APIResponse( raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), client=client, @@ -173,7 +173,7 @@ def test_response_parse_annotated_type(client: DigitaloceanGenaiSDK) -> None: assert obj.bar == 2 -async def test_async_response_parse_annotated_type(async_client: AsyncDigitaloceanGenaiSDK) -> None: +async def test_async_response_parse_annotated_type(async_client: AsyncGradientAI) -> None: response = AsyncAPIResponse( raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), client=async_client, @@ -201,7 +201,7 @@ async def test_async_response_parse_annotated_type(async_client: AsyncDigitaloce ("FalSe", False), ], ) -def test_response_parse_bool(client: DigitaloceanGenaiSDK, content: str, expected: bool) -> None: +def test_response_parse_bool(client: GradientAI, content: str, expected: bool) -> None: response = APIResponse( raw=httpx.Response(200, content=content), client=client, @@ -226,7 +226,7 @@ def test_response_parse_bool(client: DigitaloceanGenaiSDK, content: str, expecte ("FalSe", False), ], ) -async def test_async_response_parse_bool(client: AsyncDigitaloceanGenaiSDK, content: str, expected: bool) -> None: +async def test_async_response_parse_bool(client: AsyncGradientAI, content: str, expected: bool) -> None: response = AsyncAPIResponse( raw=httpx.Response(200, content=content), client=client, @@ -245,7 +245,7 @@ class OtherModel(BaseModel): @pytest.mark.parametrize("client", [False], indirect=True) # loose validation -def test_response_parse_expect_model_union_non_json_content(client: DigitaloceanGenaiSDK) -> None: +def test_response_parse_expect_model_union_non_json_content(client: GradientAI) -> None: response = APIResponse( raw=httpx.Response(200, content=b"foo", headers={"Content-Type": "application/text"}), client=client, @@ -262,9 
+262,7 @@ def test_response_parse_expect_model_union_non_json_content(client: Digitalocean @pytest.mark.asyncio @pytest.mark.parametrize("async_client", [False], indirect=True) # loose validation -async def test_async_response_parse_expect_model_union_non_json_content( - async_client: AsyncDigitaloceanGenaiSDK, -) -> None: +async def test_async_response_parse_expect_model_union_non_json_content(async_client: AsyncGradientAI) -> None: response = AsyncAPIResponse( raw=httpx.Response(200, content=b"foo", headers={"Content-Type": "application/text"}), client=async_client, diff --git a/tests/test_streaming.py b/tests/test_streaming.py index e707c674..cdb41a77 100644 --- a/tests/test_streaming.py +++ b/tests/test_streaming.py @@ -5,13 +5,13 @@ import httpx import pytest -from digitalocean_genai_sdk import DigitaloceanGenaiSDK, AsyncDigitaloceanGenaiSDK -from digitalocean_genai_sdk._streaming import Stream, AsyncStream, ServerSentEvent +from gradientai import GradientAI, AsyncGradientAI +from gradientai._streaming import Stream, AsyncStream, ServerSentEvent @pytest.mark.asyncio @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) -async def test_basic(sync: bool, client: DigitaloceanGenaiSDK, async_client: AsyncDigitaloceanGenaiSDK) -> None: +async def test_basic(sync: bool, client: GradientAI, async_client: AsyncGradientAI) -> None: def body() -> Iterator[bytes]: yield b"event: completion\n" yield b'data: {"foo":true}\n' @@ -28,9 +28,7 @@ def body() -> Iterator[bytes]: @pytest.mark.asyncio @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) -async def test_data_missing_event( - sync: bool, client: DigitaloceanGenaiSDK, async_client: AsyncDigitaloceanGenaiSDK -) -> None: +async def test_data_missing_event(sync: bool, client: GradientAI, async_client: AsyncGradientAI) -> None: def body() -> Iterator[bytes]: yield b'data: {"foo":true}\n' yield b"\n" @@ -46,9 +44,7 @@ def body() -> Iterator[bytes]: @pytest.mark.asyncio @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) -async def test_event_missing_data( - sync: bool, client: DigitaloceanGenaiSDK, async_client: AsyncDigitaloceanGenaiSDK -) -> None: +async def test_event_missing_data(sync: bool, client: GradientAI, async_client: AsyncGradientAI) -> None: def body() -> Iterator[bytes]: yield b"event: ping\n" yield b"\n" @@ -64,9 +60,7 @@ def body() -> Iterator[bytes]: @pytest.mark.asyncio @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) -async def test_multiple_events( - sync: bool, client: DigitaloceanGenaiSDK, async_client: AsyncDigitaloceanGenaiSDK -) -> None: +async def test_multiple_events(sync: bool, client: GradientAI, async_client: AsyncGradientAI) -> None: def body() -> Iterator[bytes]: yield b"event: ping\n" yield b"\n" @@ -88,9 +82,7 @@ def body() -> Iterator[bytes]: @pytest.mark.asyncio @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) -async def test_multiple_events_with_data( - sync: bool, client: DigitaloceanGenaiSDK, async_client: AsyncDigitaloceanGenaiSDK -) -> None: +async def test_multiple_events_with_data(sync: bool, client: GradientAI, async_client: AsyncGradientAI) -> None: def body() -> Iterator[bytes]: yield b"event: ping\n" yield b'data: {"foo":true}\n' @@ -115,7 +107,7 @@ def body() -> Iterator[bytes]: @pytest.mark.asyncio @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) async def test_multiple_data_lines_with_empty_line( - sync: bool, client: DigitaloceanGenaiSDK, async_client: AsyncDigitaloceanGenaiSDK + 
sync: bool, client: GradientAI, async_client: AsyncGradientAI ) -> None: def body() -> Iterator[bytes]: yield b"event: ping\n" @@ -138,9 +130,7 @@ def body() -> Iterator[bytes]: @pytest.mark.asyncio @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) -async def test_data_json_escaped_double_new_line( - sync: bool, client: DigitaloceanGenaiSDK, async_client: AsyncDigitaloceanGenaiSDK -) -> None: +async def test_data_json_escaped_double_new_line(sync: bool, client: GradientAI, async_client: AsyncGradientAI) -> None: def body() -> Iterator[bytes]: yield b"event: ping\n" yield b'data: {"foo": "my long\\n\\ncontent"}' @@ -157,9 +147,7 @@ def body() -> Iterator[bytes]: @pytest.mark.asyncio @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) -async def test_multiple_data_lines( - sync: bool, client: DigitaloceanGenaiSDK, async_client: AsyncDigitaloceanGenaiSDK -) -> None: +async def test_multiple_data_lines(sync: bool, client: GradientAI, async_client: AsyncGradientAI) -> None: def body() -> Iterator[bytes]: yield b"event: ping\n" yield b"data: {\n" @@ -179,8 +167,8 @@ def body() -> Iterator[bytes]: @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) async def test_special_new_line_character( sync: bool, - client: DigitaloceanGenaiSDK, - async_client: AsyncDigitaloceanGenaiSDK, + client: GradientAI, + async_client: AsyncGradientAI, ) -> None: def body() -> Iterator[bytes]: yield b'data: {"content":" culpa"}\n' @@ -210,8 +198,8 @@ def body() -> Iterator[bytes]: @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) async def test_multi_byte_character_multiple_chunks( sync: bool, - client: DigitaloceanGenaiSDK, - async_client: AsyncDigitaloceanGenaiSDK, + client: GradientAI, + async_client: AsyncGradientAI, ) -> None: def body() -> Iterator[bytes]: yield b'data: {"content":"' @@ -251,8 +239,8 @@ def make_event_iterator( content: Iterator[bytes], *, sync: bool, - client: DigitaloceanGenaiSDK, - async_client: AsyncDigitaloceanGenaiSDK, + client: GradientAI, + async_client: AsyncGradientAI, ) -> Iterator[ServerSentEvent] | AsyncIterator[ServerSentEvent]: if sync: return Stream(cast_to=object, client=client, response=httpx.Response(200, content=content))._iter_events() diff --git a/tests/test_transform.py b/tests/test_transform.py index 3c29084e..825fe048 100644 --- a/tests/test_transform.py +++ b/tests/test_transform.py @@ -8,15 +8,15 @@ import pytest -from digitalocean_genai_sdk._types import NOT_GIVEN, Base64FileInput -from digitalocean_genai_sdk._utils import ( +from gradientai._types import NOT_GIVEN, Base64FileInput +from gradientai._utils import ( PropertyInfo, transform as _transform, parse_datetime, async_transform as _async_transform, ) -from digitalocean_genai_sdk._compat import PYDANTIC_V2 -from digitalocean_genai_sdk._models import BaseModel +from gradientai._compat import PYDANTIC_V2 +from gradientai._models import BaseModel _T = TypeVar("_T") diff --git a/tests/test_utils/test_proxy.py b/tests/test_utils/test_proxy.py index 6fe8c808..3856b2c9 100644 --- a/tests/test_utils/test_proxy.py +++ b/tests/test_utils/test_proxy.py @@ -2,7 +2,7 @@ from typing import Any from typing_extensions import override -from digitalocean_genai_sdk._utils import LazyProxy +from gradientai._utils import LazyProxy class RecursiveLazyProxy(LazyProxy[Any]): diff --git a/tests/test_utils/test_typing.py b/tests/test_utils/test_typing.py index 72bf3422..66ad064f 100644 --- a/tests/test_utils/test_typing.py +++ b/tests/test_utils/test_typing.py 
@@ -2,7 +2,7 @@
 
 from typing import Generic, TypeVar, cast
 
-from digitalocean_genai_sdk._utils import extract_type_var_from_base
+from gradientai._utils import extract_type_var_from_base
 
 _T = TypeVar("_T")
 _T2 = TypeVar("_T2")
diff --git a/tests/utils.py b/tests/utils.py
index e795e2e8..b539ed2c 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -8,8 +8,8 @@
 from datetime import date, datetime
 from typing_extensions import Literal, get_args, get_origin, assert_type
 
-from digitalocean_genai_sdk._types import Omit, NoneType
-from digitalocean_genai_sdk._utils import (
+from gradientai._types import Omit, NoneType
+from gradientai._utils import (
     is_dict,
     is_list,
     is_list_type,
@@ -18,8 +18,8 @@
     is_annotated_type,
     is_type_alias_type,
 )
-from digitalocean_genai_sdk._compat import PYDANTIC_V2, field_outer_type, get_model_fields
-from digitalocean_genai_sdk._models import BaseModel
+from gradientai._compat import PYDANTIC_V2, field_outer_type, get_model_fields
+from gradientai._models import BaseModel
 
 BaseModelT = TypeVar("BaseModelT", bound=BaseModel)
 